FEATURE: Allow using large context OpenAI models for summarization (#86)
parent 3364fec425
commit 8742535024
@@ -12,7 +12,9 @@ module DiscourseAi
       def inject_into(plugin)
         [
           Strategies::OpenAi.new("gpt-4"),
+          Strategies::OpenAi.new("gpt-4-32k"),
           Strategies::OpenAi.new("gpt-3.5-turbo"),
+          Strategies::OpenAi.new("gpt-3.5-turbo-16k"),
           Strategies::DiscourseAi.new("bart-large-cnn-samsum"),
           Strategies::DiscourseAi.new("flan-t5-base-samsum"),
           Strategies::DiscourseAi.new("long-t5-tglobal-base-16384-book-summary"),
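The two `+` lines above register the new large-context variants (gpt-4-32k and gpt-3.5-turbo-16k) alongside the existing summarization strategies. As a rough, self-contained sketch of the pattern (the class below is illustrative only, not the plugin's actual Strategies::OpenAi implementation):

# Illustrative sketch only: a strategy wraps one model name and reports its context window.
class ExampleOpenAiStrategy
  CONTEXT_WINDOWS = {
    "gpt-3.5-turbo" => 4096,
    "gpt-3.5-turbo-16k" => 16_384,
    "gpt-4" => 8192,
    "gpt-4-32k" => 32_768,
  }.freeze

  attr_reader :model

  def initialize(model)
    @model = model
  end

  def max_length
    CONTEXT_WINDOWS[model]
  end
end

strategies =
  ["gpt-4", "gpt-4-32k", "gpt-3.5-turbo", "gpt-3.5-turbo-16k"].map do |name|
    ExampleOpenAiStrategy.new(name)
  end

strategies.each { |s| puts "#{s.model}: #{s.max_length} tokens" }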
@@ -41,7 +41,12 @@ module DiscourseAi
       private
 
       def max_length
-        lengths = { "gpt-3.5-turbo" => 4096, "gpt-4" => 8192 }
+        lengths = {
+          "gpt-3.5-turbo" => 4096,
+          "gpt-4" => 8192,
+          "gpt-3.5-turbo-16k" => 16_384,
+          "gpt-4-32k" => 32_768,
+        }
 
         lengths[model]
       end
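The expanded hash maps every supported OpenAI model name to its context window in tokens; lengths[model] simply returns nil for a model that is not listed. A hedged sketch of how a caller might use such a limit to decide whether content needs trimming before summarization (the 4-characters-per-token heuristic and the method name below are assumptions for illustration, not the plugin's actual logic):

# Illustrative sketch only: trim text so its estimated token count fits the model's
# context window, keeping some room for the completion. A real implementation would
# use a proper tokenizer rather than a character-count heuristic.
def fit_to_context(text, max_tokens, reserved_for_reply: 512)
  budget_tokens = max_tokens - reserved_for_reply
  estimated_tokens = text.length / 4 # rough assumption: ~4 characters per token

  return text if estimated_tokens <= budget_tokens

  text[0, budget_tokens * 4]
end

long_text = "word " * 10_000
puts fit_to_context(long_text, 4096).length    # trimmed to fit gpt-3.5-turbo
puts fit_to_context(long_text, 32_768).length  # fits gpt-4-32k without trimming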