# frozen_string_literal: true

module DiscourseAi
  module Summarization
    class EntryPoint
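      # Registers the summarization strategies (and the models backing them) with the plugin.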
      def inject_into(plugin)
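        # Large-context models; each one is registered below with the FoldContent strategy.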
        foldable_models = [
          Models::OpenAi.new("gpt-4", max_tokens: 8192),
          Models::OpenAi.new("gpt-4-32k", max_tokens: 32_768),
          Models::OpenAi.new("gpt-4-1106-preview", max_tokens: 100_000),
          Models::OpenAi.new("gpt-3.5-turbo", max_tokens: 4096),
          Models::OpenAi.new("gpt-3.5-turbo-16k", max_tokens: 16_384),
          Models::Anthropic.new("claude-2", max_tokens: 200_000),
          Models::Anthropic.new("claude-instant-1", max_tokens: 100_000),
          Models::Llama2.new("Llama2-chat-hf", max_tokens: SiteSetting.ai_hugging_face_token_limit),
          Models::Llama2FineTunedOrcaStyle.new(
            "StableBeluga2",
            max_tokens: SiteSetting.ai_hugging_face_token_limit,
          ),
          Models::Gemini.new("gemini-pro", max_tokens: 32_768),
          Models::Mixtral.new("mistralai/Mixtral-8x7B-Instruct-v0.1", max_tokens: 32_000),
        ]

        foldable_models.each do |model|
          plugin.register_summarization_strategy(Strategies::FoldContent.new(model))
        end

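        # Smaller Discourse-hosted models; each one is registered below with the TruncateContent strategy.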
        truncatable_models = [
          Models::Discourse.new("long-t5-tglobal-base-16384-book-summary", max_tokens: 16_384),
          Models::Discourse.new("bart-large-cnn-samsum", max_tokens: 1024),
          Models::Discourse.new("flan-t5-base-samsum", max_tokens: 512),
        ]

        truncatable_models.each do |model|
          plugin.register_summarization_strategy(Strategies::TruncateContent.new(model))
        end
      end
    end
  end
end