mirror of
https://github.com/discourse/discourse-ai.git
synced 2025-02-07 03:58:18 +00:00
12 lines
269 B
Ruby
12 lines
269 B
Ruby
|
# frozen_string_literal: true
|
||
|
|
||
|
module DiscourseAi
  module Tokenizer
    # Tokenizer backed by the bge-large-en vocabulary file shipped with the
    # plugin (presumably the BAAI/bge-large-en embeddings model — the JSON
    # filename is the only evidence visible here).
    class BgeLargeEnTokenizer < BasicTokenizer
      # Lazily loads and memoizes the tokenizer built from the bundled
      # bge-large-en.json definition. Subsequent calls return the cached
      # instance.
      #
      # @return [Tokenizers::Tokenizer] the loaded bge-large-en tokenizer
      def self.tokenizer
        # Use a class *instance* variable rather than a class variable
        # (@@tokenizer): class variables are shared across the whole
        # inheritance tree, so sibling BasicTokenizer subclasses memoizing
        # their own @@tokenizer would collide with this one. @tokenizer is
        # scoped to this class alone.
        #
        # NOTE(review): the relative path assumes the process CWD is the
        # Discourse root — confirm against how the plugin is loaded.
        @tokenizer ||=
          Tokenizers.from_file("./plugins/discourse-ai/tokenizers/bge-large-en.json")
      end
    end
  end
end
|