feat: 添加自定义 temperature 和 top_p (#1260)
* 在设置的高级面板里自定义temperature和top_p * change default temperature from 0.8 to 0.5 * perf: 检查代码,增加仅 api 的接口判断 * chore: 锁定 pnpm-lock.yaml --------- Co-authored-by: ChenZhaoYu <790348264@qq.com>
This commit is contained in:
parent
1187d88593
commit
86bba7d8f3
|
@ -27,6 +27,7 @@ const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT
|
|||
const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true'
|
||||
|
||||
let apiModel: ApiModel
|
||||
let model = 'gpt-3.5-turbo'
|
||||
|
||||
if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN))
|
||||
throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable')
|
||||
|
@ -39,7 +40,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
|
|||
if (isNotEmptyString(process.env.OPENAI_API_KEY)) {
|
||||
const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL
|
||||
const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
|
||||
const model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'
|
||||
model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'
|
||||
|
||||
const options: ChatGPTAPIOptions = {
|
||||
apiKey: process.env.OPENAI_API_KEY,
|
||||
|
@ -90,13 +91,14 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
|
|||
})()
|
||||
|
||||
async function chatReplyProcess(options: RequestOptions) {
|
||||
const { message, lastContext, process, systemMessage } = options
|
||||
const { message, lastContext, process, systemMessage, temperature, top_p } = options
|
||||
try {
|
||||
let options: SendMessageOptions = { timeoutMs }
|
||||
|
||||
if (apiModel === 'ChatGPTAPI') {
|
||||
if (isNotEmptyString(systemMessage))
|
||||
options.systemMessage = systemMessage
|
||||
options.completionParams = { model, temperature, top_p }
|
||||
}
|
||||
|
||||
if (lastContext != null) {
|
||||
|
|
|
@ -5,6 +5,8 @@ export interface RequestOptions {
|
|||
lastContext?: { conversationId?: string; parentMessageId?: string }
|
||||
process?: (chat: ChatMessage) => void
|
||||
systemMessage?: string
|
||||
temperature?: number
|
||||
top_p?: number
|
||||
}
|
||||
|
||||
export interface BalanceResponse {
|
||||
|
|
|
@ -23,7 +23,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
|
|||
res.setHeader('Content-type', 'application/octet-stream')
|
||||
|
||||
try {
|
||||
const { prompt, options = {}, systemMessage } = req.body as RequestProps
|
||||
const { prompt, options = {}, systemMessage, temperature, top_p } = req.body as RequestProps
|
||||
let firstChunk = true
|
||||
await chatReplyProcess({
|
||||
message: prompt,
|
||||
|
@ -33,6 +33,8 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
|
|||
firstChunk = false
|
||||
},
|
||||
systemMessage,
|
||||
temperature,
|
||||
top_p,
|
||||
})
|
||||
}
|
||||
catch (error) {
|
||||
|
|
|
@ -4,6 +4,8 @@ export interface RequestProps {
|
|||
prompt: string
|
||||
options?: ChatContext
|
||||
systemMessage: string
|
||||
temperature?: number
|
||||
top_p?: number
|
||||
}
|
||||
|
||||
export interface ChatContext {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import type { AxiosProgressEvent, GenericAbortSignal } from 'axios'
|
||||
import { post } from '@/utils/request'
|
||||
import { useSettingStore } from '@/store'
|
||||
import { useAuthStore, useSettingStore } from '@/store'
|
||||
|
||||
export function fetchChatAPI<T = any>(
|
||||
prompt: string,
|
||||
|
@ -28,10 +28,25 @@ export function fetchChatAPIProcess<T = any>(
|
|||
onDownloadProgress?: (progressEvent: AxiosProgressEvent) => void },
|
||||
) {
|
||||
const settingStore = useSettingStore()
|
||||
const authStore = useAuthStore()
|
||||
|
||||
let data: Record<string, any> = {
|
||||
prompt: params.prompt,
|
||||
options: params.options,
|
||||
}
|
||||
|
||||
if (authStore.isChatGPTAPI) {
|
||||
data = {
|
||||
...data,
|
||||
systemMessage: settingStore.systemMessage,
|
||||
temperature: settingStore.temperature,
|
||||
top_p: settingStore.top_p,
|
||||
}
|
||||
}
|
||||
|
||||
return post<T>({
|
||||
url: '/chat-process',
|
||||
data: { prompt: params.prompt, options: params.options, systemMessage: settingStore.systemMessage },
|
||||
data,
|
||||
signal: params.signal,
|
||||
onDownloadProgress: params.onDownloadProgress,
|
||||
})
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<script lang="ts" setup>
|
||||
import { ref } from 'vue'
|
||||
import { NButton, NInput, useMessage } from 'naive-ui'
|
||||
import { NButton, NInput, NSlider, useMessage } from 'naive-ui'
|
||||
import { useSettingStore } from '@/store'
|
||||
import type { SettingsState } from '@/store/modules/settings/helper'
|
||||
import { t } from '@/locales'
|
||||
|
@ -11,6 +11,10 @@ const ms = useMessage()
|
|||
|
||||
const systemMessage = ref(settingStore.systemMessage ?? '')
|
||||
|
||||
const temperature = ref(settingStore.temperature ?? 0.5)
|
||||
|
||||
const top_p = ref(settingStore.top_p ?? 1)
|
||||
|
||||
function updateSettings(options: Partial<SettingsState>) {
|
||||
settingStore.updateSetting(options)
|
||||
ms.success(t('common.success'))
|
||||
|
@ -27,7 +31,7 @@ function handleReset() {
|
|||
<div class="p-4 space-y-5 min-h-[200px]">
|
||||
<div class="space-y-6">
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex-shrink-0 w-[100px]">{{ $t('setting.role') }}</span>
|
||||
<span class="flex-shrink-0 w-[120px]">{{ $t('setting.role') }}</span>
|
||||
<div class="flex-1">
|
||||
<NInput v-model:value="systemMessage" type="textarea" :autosize="{ minRows: 1, maxRows: 4 }" />
|
||||
</div>
|
||||
|
@ -36,7 +40,27 @@ function handleReset() {
|
|||
</NButton>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex-shrink-0 w-[100px]"> </span>
|
||||
<span class="flex-shrink-0 w-[120px]">{{ $t('setting.temperature') }} </span>
|
||||
<div class="flex-1">
|
||||
<NSlider v-model:value="temperature" :max="1" :min="0" :step="0.1" />
|
||||
</div>
|
||||
<span>{{ temperature }}</span>
|
||||
<NButton size="tiny" text type="primary" @click="updateSettings({ temperature })">
|
||||
{{ $t('common.save') }}
|
||||
</NButton>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex-shrink-0 w-[120px]">{{ $t('setting.top_p') }} </span>
|
||||
<div class="flex-1">
|
||||
<NSlider v-model:value="top_p" :max="1" :min="0" :step="0.1" />
|
||||
</div>
|
||||
<span>{{ top_p }}</span>
|
||||
<NButton size="tiny" text type="primary" @click="updateSettings({ top_p })">
|
||||
{{ $t('common.save') }}
|
||||
</NButton>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<span class="flex-shrink-0 w-[120px]"> </span>
|
||||
<NButton size="small" @click="handleReset">
|
||||
{{ $t('common.reset') }}
|
||||
</NButton>
|
||||
|
|
|
@ -59,6 +59,8 @@ export default {
|
|||
name: 'Name',
|
||||
description: 'Description',
|
||||
role: 'Role',
|
||||
temperature: 'Temperature',
|
||||
top_p: 'Top_p',
|
||||
resetUserInfo: 'Reset UserInfo',
|
||||
chatHistory: 'ChatHistory',
|
||||
theme: 'Theme',
|
||||
|
|
|
@ -59,6 +59,8 @@ export default {
|
|||
name: '名称',
|
||||
description: '描述',
|
||||
role: '角色设定',
|
||||
temperature: 'Temperature',
|
||||
top_p: 'Top_p',
|
||||
resetUserInfo: '重置用户信息',
|
||||
chatHistory: '聊天记录',
|
||||
theme: '主题',
|
||||
|
|
|
@ -59,6 +59,8 @@ export default {
|
|||
name: '名稱',
|
||||
description: '描述',
|
||||
role: '角色設定',
|
||||
temperature: 'Temperature',
|
||||
top_p: 'Top_p',
|
||||
resetUserInfo: '重設使用者資訊',
|
||||
chatHistory: '紀錄',
|
||||
theme: '主題',
|
||||
|
|
|
@ -4,11 +4,15 @@ const LOCAL_NAME = 'settingsStorage'
|
|||
|
||||
export interface SettingsState {
|
||||
systemMessage: string
|
||||
temperature: number
|
||||
top_p: number
|
||||
}
|
||||
|
||||
export function defaultSetting(): SettingsState {
|
||||
return {
|
||||
systemMessage: 'You are ChatGPT, a large language model trained by OpenAI. Follow the user\'s instructions carefully. Respond using markdown.',
|
||||
temperature: 0.8,
|
||||
top_p: 1,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
Loading…
Reference in New Issue