feat: 添加自定义 temperature 和 top_p (#1260)

* 在设置的高级面板里自定义temperature和top_p

* change default temperature from 0.8 to 0.5

* perf: 检查代码,增加仅 api 的接口判断

* chore: 锁定 pnpm-lock.yaml

---------

Co-authored-by: ChenZhaoYu <790348264@qq.com>
This commit is contained in:
quzard 2023-04-07 14:32:09 +08:00 committed by GitHub
parent 1187d88593
commit 86bba7d8f3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 65 additions and 8 deletions

View File

@ -27,6 +27,7 @@ const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT
const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true' const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true'
let apiModel: ApiModel let apiModel: ApiModel
let model = 'gpt-3.5-turbo'
if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN)) if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN))
throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable') throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable')
@ -39,7 +40,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
if (isNotEmptyString(process.env.OPENAI_API_KEY)) { if (isNotEmptyString(process.env.OPENAI_API_KEY)) {
const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL
const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
const model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo' model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'
const options: ChatGPTAPIOptions = { const options: ChatGPTAPIOptions = {
apiKey: process.env.OPENAI_API_KEY, apiKey: process.env.OPENAI_API_KEY,
@ -90,13 +91,14 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
})() })()
async function chatReplyProcess(options: RequestOptions) { async function chatReplyProcess(options: RequestOptions) {
const { message, lastContext, process, systemMessage } = options const { message, lastContext, process, systemMessage, temperature, top_p } = options
try { try {
let options: SendMessageOptions = { timeoutMs } let options: SendMessageOptions = { timeoutMs }
if (apiModel === 'ChatGPTAPI') { if (apiModel === 'ChatGPTAPI') {
if (isNotEmptyString(systemMessage)) if (isNotEmptyString(systemMessage))
options.systemMessage = systemMessage options.systemMessage = systemMessage
options.completionParams = { model, temperature, top_p }
} }
if (lastContext != null) { if (lastContext != null) {

View File

@ -5,6 +5,8 @@ export interface RequestOptions {
lastContext?: { conversationId?: string; parentMessageId?: string } lastContext?: { conversationId?: string; parentMessageId?: string }
process?: (chat: ChatMessage) => void process?: (chat: ChatMessage) => void
systemMessage?: string systemMessage?: string
temperature?: number
top_p?: number
} }
export interface BalanceResponse { export interface BalanceResponse {

View File

@ -23,7 +23,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
res.setHeader('Content-type', 'application/octet-stream') res.setHeader('Content-type', 'application/octet-stream')
try { try {
const { prompt, options = {}, systemMessage } = req.body as RequestProps const { prompt, options = {}, systemMessage, temperature, top_p } = req.body as RequestProps
let firstChunk = true let firstChunk = true
await chatReplyProcess({ await chatReplyProcess({
message: prompt, message: prompt,
@ -33,6 +33,8 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
firstChunk = false firstChunk = false
}, },
systemMessage, systemMessage,
temperature,
top_p,
}) })
} }
catch (error) { catch (error) {

View File

@ -4,6 +4,8 @@ export interface RequestProps {
prompt: string prompt: string
options?: ChatContext options?: ChatContext
systemMessage: string systemMessage: string
temperature?: number
top_p?: number
} }
export interface ChatContext { export interface ChatContext {

View File

@ -1,6 +1,6 @@
import type { AxiosProgressEvent, GenericAbortSignal } from 'axios' import type { AxiosProgressEvent, GenericAbortSignal } from 'axios'
import { post } from '@/utils/request' import { post } from '@/utils/request'
import { useSettingStore } from '@/store' import { useAuthStore, useSettingStore } from '@/store'
export function fetchChatAPI<T = any>( export function fetchChatAPI<T = any>(
prompt: string, prompt: string,
@ -28,10 +28,25 @@ export function fetchChatAPIProcess<T = any>(
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void }, onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void },
) { ) {
const settingStore = useSettingStore() const settingStore = useSettingStore()
const authStore = useAuthStore()
let data: Record<string, any> = {
prompt: params.prompt,
options: params.options,
}
if (authStore.isChatGPTAPI) {
data = {
...data,
systemMessage: settingStore.systemMessage,
temperature: settingStore.temperature,
top_p: settingStore.top_p,
}
}
return post<T>({ return post<T>({
url: '/chat-process', url: '/chat-process',
data: { prompt: params.prompt, options: params.options, systemMessage: settingStore.systemMessage }, data,
signal: params.signal, signal: params.signal,
onDownloadProgress: params.onDownloadProgress, onDownloadProgress: params.onDownloadProgress,
}) })

View File

@ -1,6 +1,6 @@
<script lang="ts" setup> <script lang="ts" setup>
import { ref } from 'vue' import { ref } from 'vue'
import { NButton, NInput, useMessage } from 'naive-ui' import { NButton, NInput, NSlider, useMessage } from 'naive-ui'
import { useSettingStore } from '@/store' import { useSettingStore } from '@/store'
import type { SettingsState } from '@/store/modules/settings/helper' import type { SettingsState } from '@/store/modules/settings/helper'
import { t } from '@/locales' import { t } from '@/locales'
@ -11,6 +11,10 @@ const ms = useMessage()
const systemMessage = ref(settingStore.systemMessage ?? '') const systemMessage = ref(settingStore.systemMessage ?? '')
const temperature = ref(settingStore.temperature ?? 0.5)
const top_p = ref(settingStore.top_p ?? 1)
function updateSettings(options: Partial<SettingsState>) { function updateSettings(options: Partial<SettingsState>) {
settingStore.updateSetting(options) settingStore.updateSetting(options)
ms.success(t('common.success')) ms.success(t('common.success'))
@ -27,7 +31,7 @@ function handleReset() {
<div class="p-4 space-y-5 min-h-[200px]"> <div class="p-4 space-y-5 min-h-[200px]">
<div class="space-y-6"> <div class="space-y-6">
<div class="flex items-center space-x-4"> <div class="flex items-center space-x-4">
<span class="flex-shrink-0 w-[100px]">{{ $t('setting.role') }}</span> <span class="flex-shrink-0 w-[120px]">{{ $t('setting.role') }}</span>
<div class="flex-1"> <div class="flex-1">
<NInput v-model:value="systemMessage" type="textarea" :autosize="{ minRows: 1, maxRows: 4 }" /> <NInput v-model:value="systemMessage" type="textarea" :autosize="{ minRows: 1, maxRows: 4 }" />
</div> </div>
@ -36,7 +40,27 @@ function handleReset() {
</NButton> </NButton>
</div> </div>
<div class="flex items-center space-x-4"> <div class="flex items-center space-x-4">
<span class="flex-shrink-0 w-[100px]">&nbsp;</span> <span class="flex-shrink-0 w-[120px]">{{ $t('setting.temperature') }} </span>
<div class="flex-1">
<NSlider v-model:value="temperature" :max="1" :min="0" :step="0.1" />
</div>
<span>{{ temperature }}</span>
<NButton size="tiny" text type="primary" @click="updateSettings({ temperature })">
{{ $t('common.save') }}
</NButton>
</div>
<div class="flex items-center space-x-4">
<span class="flex-shrink-0 w-[120px]">{{ $t('setting.top_p') }} </span>
<div class="flex-1">
<NSlider v-model:value="top_p" :max="1" :min="0" :step="0.1" />
</div>
<span>{{ top_p }}</span>
<NButton size="tiny" text type="primary" @click="updateSettings({ top_p })">
{{ $t('common.save') }}
</NButton>
</div>
<div class="flex items-center space-x-4">
<span class="flex-shrink-0 w-[120px]">&nbsp;</span>
<NButton size="small" @click="handleReset"> <NButton size="small" @click="handleReset">
{{ $t('common.reset') }} {{ $t('common.reset') }}
</NButton> </NButton>

View File

@ -59,6 +59,8 @@ export default {
name: 'Name', name: 'Name',
description: 'Description', description: 'Description',
role: 'Role', role: 'Role',
temperature: 'Temperature',
top_p: 'Top_p',
resetUserInfo: 'Reset UserInfo', resetUserInfo: 'Reset UserInfo',
chatHistory: 'ChatHistory', chatHistory: 'ChatHistory',
theme: 'Theme', theme: 'Theme',

View File

@ -59,6 +59,8 @@ export default {
name: '名称', name: '名称',
description: '描述', description: '描述',
role: '角色设定', role: '角色设定',
temperature: 'Temperature',
top_p: 'Top_p',
resetUserInfo: '重置用户信息', resetUserInfo: '重置用户信息',
chatHistory: '聊天记录', chatHistory: '聊天记录',
theme: '主题', theme: '主题',

View File

@ -59,6 +59,8 @@ export default {
name: '名稱', name: '名稱',
description: '描述', description: '描述',
role: '角色設定', role: '角色設定',
temperature: 'Temperature',
top_p: 'Top_p',
resetUserInfo: '重設使用者資訊', resetUserInfo: '重設使用者資訊',
chatHistory: '紀錄', chatHistory: '紀錄',
theme: '主題', theme: '主題',

View File

@ -4,11 +4,15 @@ const LOCAL_NAME = 'settingsStorage'
export interface SettingsState { export interface SettingsState {
systemMessage: string systemMessage: string
temperature: number
top_p: number
} }
export function defaultSetting(): SettingsState { export function defaultSetting(): SettingsState {
return { return {
systemMessage: 'You are ChatGPT, a large language model trained by OpenAI. Follow the user\'s instructions carefully. Respond using markdown.', systemMessage: 'You are ChatGPT, a large language model trained by OpenAI. Follow the user\'s instructions carefully. Respond using markdown.',
temperature: 0.8,
top_p: 1,
} }
} }