# frozen_string_literal: true

module DiscourseAi
  module Summarization
    class EntryPoint
      def inject_into(plugin)
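        # LLM backends registered below with the fold-content summarization strategy.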
        foldable_models = [
          Models::OpenAi.new("open_ai:gpt-4", max_tokens: 8192),
          Models::OpenAi.new("open_ai:gpt-4-32k", max_tokens: 32_768),
          Models::OpenAi.new("open_ai:gpt-4-turbo", max_tokens: 100_000),
          Models::OpenAi.new("open_ai:gpt-3.5-turbo", max_tokens: 4096),
          Models::OpenAi.new("open_ai:gpt-3.5-turbo-16k", max_tokens: 16_384),
          Models::Gemini.new("google:gemini-pro", max_tokens: 32_768),
          Models::Gemini.new("google:gemini-1.5-pro", max_tokens: 800_000),
        ]
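
        # Use AWS Bedrock for Claude models when it is correctly configured;
        # otherwise fall back to the Anthropic API.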
        claude_prov = "anthropic"
        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
          claude_prov = "aws_bedrock"
        end

        foldable_models << Models::Anthropic.new("#{claude_prov}:claude-2", max_tokens: 200_000)
        foldable_models << Models::Anthropic.new(
          "#{claude_prov}:claude-instant-1",
          max_tokens: 100_000,
        )
        foldable_models << Models::Anthropic.new(
          "#{claude_prov}:claude-3-haiku",
          max_tokens: 200_000,
        )
        foldable_models << Models::Anthropic.new(
          "#{claude_prov}:claude-3-sonnet",
          max_tokens: 200_000,
        )
        foldable_models << Models::Anthropic.new(
          "#{claude_prov}:claude-3-opus",
          max_tokens: 200_000,
        )
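
        # Use a vLLM endpoint for Mixtral when it is correctly configured;
        # otherwise fall back to Hugging Face inference.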
        mixtral_prov = "hugging_face"
        if DiscourseAi::Completions::Endpoints::Vllm.correctly_configured?(
             "mistralai/Mixtral-8x7B-Instruct-v0.1",
           )
          mixtral_prov = "vllm"
        end

        foldable_models << Models::Mixtral.new(
          "#{mixtral_prov}:mistralai/Mixtral-8x7B-Instruct-v0.1",
          max_tokens: 32_000,
        )
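
        # Expose user-defined LLMs (LlmModel records) as summarization backends too.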
        LlmModel.all.each do |model|
          foldable_models << Models::CustomLlm.new(
            "custom:#{model.id}",
            max_tokens: model.max_prompt_tokens,
          )
        end
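
        # Register one fold-content summarization strategy per available model.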
        foldable_models.each do |model|
          plugin.register_summarization_strategy(Strategies::FoldContent.new(model))
        end
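
        # When a custom LLM is created later, register a strategy for it as well,
        # unless one is already registered for that model.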
        plugin.add_model_callback(LlmModel, :after_create) do
          new_model = Models::CustomLlm.new("custom:#{self.id}", max_tokens: self.max_prompt_tokens)

          if ::Summarization::Base.find_strategy("custom:#{self.id}").nil?
            plugin.register_summarization_strategy(Strategies::FoldContent.new(new_model))
          end
        end
      end
    end
  end
end