feat: support the latest gpt-3.5-turbo-16k model (#1789)
* fix: add support for the 16k models
* fix: adjust the model check logic
* fix: adjust the gpt-3.5-turbo token-limit check logic

Co-authored-by: ziyang <ziyang@dora.design>
parent bc390ef09d · commit 6e272bb343
@@ -218,7 +218,7 @@ services:
       # API endpoint URL; optional, takes effect when OPENAI_API_KEY is set
       OPENAI_API_BASE_URL: xxx
       # API model; optional, takes effect when OPENAI_API_KEY is set, https://platform.openai.com/docs/models
-      # gpt-4, gpt-4-0314, gpt-4-32k, gpt-4-32k-0314, gpt-3.5-turbo, gpt-3.5-turbo-0301, text-davinci-003, text-davinci-002, code-davinci-002
+      # gpt-4, gpt-4-0314, gpt-4-0613, gpt-4-32k, gpt-4-32k-0314, gpt-4-32k-0613, gpt-3.5-turbo-16k, gpt-3.5-turbo-16k-0613, gpt-3.5-turbo, gpt-3.5-turbo-0301, gpt-3.5-turbo-0613, text-davinci-003, text-davinci-002, code-davinci-002
       OPENAI_API_MODEL: xxx
       # Reverse proxy; optional
       API_REVERSE_PROXY: xxx
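For context, a minimal sketch of how the service side might pick up the OPENAI_API_MODEL setting above. Only the variable name and the isNotEmptyString helper appear in this commit; the helper body and the gpt-3.5-turbo fallback are assumptions, not taken from this diff.

// Hedged sketch: reading OPENAI_API_MODEL with an assumed fallback.
function isNotEmptyString(value: unknown): value is string {
  return typeof value === 'string' && value.trim().length > 0
}

const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
// Fall back to the base model when the variable is unset (assumed default).
const model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'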
@@ -58,6 +58,12 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
         options.maxResponseTokens = 2048
       }
     }
+    else if (model.toLowerCase().includes('gpt-3.5')) {
+      if (model.toLowerCase().includes('16k')) {
+        options.maxModelTokens = 16384
+        options.maxResponseTokens = 4096
+      }
+    }
 
     if (isNotEmptyString(OPENAI_API_BASE_URL))
      options.apiBaseUrl = `${OPENAI_API_BASE_URL}/v1`
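To put the hunk above in context, here is a self-contained sketch of the token-limit selection after this change. Only the added gpt-3.5 16k branch and the maxResponseTokens = 2048 context line are confirmed by the diff; the gpt-4 branch values and the defaults are assumptions for illustration.

// Hedged sketch of the model-to-token-limit selection (assumed surrounding logic).
interface TokenOptions {
  maxModelTokens: number
  maxResponseTokens: number
}

function resolveTokenLimits(model: string): TokenOptions {
  // Assumed defaults for the base gpt-3.5-turbo context window.
  const options: TokenOptions = { maxModelTokens: 4096, maxResponseTokens: 1024 }
  const m = model.toLowerCase()

  if (m.includes('gpt-4')) {
    if (m.includes('32k')) {
      // Assumed gpt-4-32k limits.
      options.maxModelTokens = 32768
      options.maxResponseTokens = 8192
    }
    else {
      // The 2048 value matches the context line visible in the hunk above.
      options.maxModelTokens = 8192
      options.maxResponseTokens = 2048
    }
  }
  else if (m.includes('gpt-3.5')) {
    if (m.includes('16k')) {
      // The branch added by this commit.
      options.maxModelTokens = 16384
      options.maxResponseTokens = 4096
    }
  }
  return options
}

// Example: resolveTokenLimits('gpt-3.5-turbo-16k-0613')
// -> { maxModelTokens: 16384, maxResponseTokens: 4096 }

Keeping maxResponseTokens well below maxModelTokens leaves room for the prompt and conversation history inside the model's context window.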