discourse-ai/lib/completions/dialects/llama2_classic.rb
Roman Rizzi 3064d4c288
REFACTOR: Summarization and HyDE now use an LLM abstraction. (#297)
* DEV: One LLM abstraction to rule them all

* REFACTOR: HyDE search uses new LLM abstraction

* REFACTOR: Summarization uses the LLM abstraction

* Updated documentation and made small fixes. Remove Bedrock claude-2 restriction
2023-11-23 12:58:54 -03:00

32 lines · 854 B · Ruby

# frozen_string_literal: true

module DiscourseAi
  module Completions
    module Dialects
      class Llama2Classic
        def self.can_translate?(model_name)
          # Only handles the classic Llama2 chat model name.
          "Llama2-*-chat-hf" == model_name
        end

        def translate(generic_prompt)
          # Unary + makes the literal mutable despite frozen_string_literal.
          llama2_prompt =
            +"[INST]<<SYS>>#{[generic_prompt[:insts], generic_prompt[:post_insts].to_s].join("\n")}<</SYS>>[/INST]\n"

          # Few-shot examples become alternating [INST] user turns and raw assistant replies.
          if generic_prompt[:examples]
            generic_prompt[:examples].each do |example_pair|
              llama2_prompt << "[INST]#{example_pair.first}[/INST]\n"
              llama2_prompt << "#{example_pair.second}\n"
            end
          end

          llama2_prompt << "[INST]#{generic_prompt[:input]}[/INST]\n"
        end

        def tokenizer
          DiscourseAi::Tokenizer::Llama2Tokenizer
        end
      end
    end
  end
end
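
For reference, a minimal sketch of how this dialect might be exercised. The hash keys (:insts, :post_insts, :examples, :input) mirror what translate reads above; the sample values and the expected output shown in the comments are illustrative assumptions, not code from the repository.

# Illustrative usage sketch (not part of the file above).
dialect = DiscourseAi::Completions::Dialects::Llama2Classic.new

generic_prompt = {
  insts: "You are a summarization bot.",
  post_insts: "Reply with the summary only.",
  examples: [["Summarize: first long post...", "A short summary."]],
  input: "Summarize: another long post...",
}

llama2_text = dialect.translate(generic_prompt)
# llama2_text should look roughly like:
# "[INST]<<SYS>>You are a summarization bot.\nReply with the summary only.<</SYS>>[/INST]\n" \
# "[INST]Summarize: first long post...[/INST]\n" \
# "A short summary.\n" \
# "[INST]Summarize: another long post...[/INST]\n"

Note that example_pair.second relies on ActiveSupport's Array#second, which is available in the Discourse environment, so plain two-element arrays work for the example pairs.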