tweaks to open llm implementation
Testing Mistral at the moment; did the minimum to get it going.
parent d76309fa17
commit 13fca098d1
@@ -88,6 +88,7 @@ en:
       gpt-3:
         5-turbo: "GPT-3.5"
       claude-2: "Claude 2"
+      open-llm: "Open LLM"
     review:
       types:
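Aside (not part of the diff): the new locale entry only supplies a display label, presumably resolved through a standard Rails I18n lookup. The key path below is a guess for illustration; only the "Open LLM" string comes from this commit.

    # Hypothetical key path; the real nesting is not visible in this hunk.
    I18n.t("discourse_ai.ai_bot.bot_names.open-llm")
    # => "Open LLM"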
@@ -228,6 +228,7 @@ discourse_ai:
       - gpt-3.5-turbo
       - gpt-4
       - claude-2
+      - open-llm
   ai_bot_enabled_chat_commands:
     type: list
     default: "categories|google|image|search|tags|time|read"
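Rough sketch (not the plugin's actual code) of how the newly listed open-llm enum value could be dispatched to a bot backend. The method name and all class names below are assumptions for illustration; the diff does not show the real mapping.

    # Illustrative dispatch only; every class name here is assumed.
    def bot_class_for(model_name)
      case model_name
      when "gpt-3.5-turbo", "gpt-4" then DiscourseAi::AiBot::OpenAiBot
      when "claude-2" then DiscourseAi::AiBot::AnthropicBot
      when "open-llm" then DiscourseAi::AiBot::OpenLlmBot
      end
    end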
@@ -42,10 +42,6 @@ module DiscourseAi
 
       private
 
-      def populate_functions(partial, function)
-        # nothing to do here, no proper function support quite yet
-      end
-
       def build_message(poster_username, content, system: false, function: nil)
         { bot: poster_username == bot_user.username, username: poster_username, content: content }
       end
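For illustration, here is what build_message (kept unchanged above) returns; the usernames are made up. The removed populate_functions override was an empty stub because, per its own comment, there is no proper function support yet.

    # Assuming bot_user.username == "open_llm_bot" (a made-up name):
    build_message("open_llm_bot", "Hello!")
    # => { bot: true, username: "open_llm_bot", content: "Hello!" }
    build_message("sam", "What model are you running?")
    # => { bot: false, username: "sam", content: "What model are you running?" }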
@@ -69,7 +65,7 @@ module DiscourseAi
           prompt,
           model_for,
           temperature: 0.4,
-          max_tokens: 600,
+          max_tokens: 1000,
           &blk
         )
       end
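The only change in this last hunk raises max_tokens from 600 to 1000, giving the open model more room for longer replies. A minimal sketch of how the surrounding call might be driven, assuming it streams partials to the block; the method name submit_prompt is an assumption, since the enclosing call is cut off above the hunk.

    # Illustrative only; the real caller is not shown in this diff.
    reply = +""
    submit_prompt(prompt, model_for, temperature: 0.4, max_tokens: 1000) do |partial|
      reply << partial   # max_tokens raised from 600 to 1000 by this commit
    end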