# Mirror of https://github.com/discourse/discourse-ai.git
# Synced 2025-08-10 07:03:40 +00:00
# frozen_string_literal: true
module DiscourseAi
  module Tokenizer
    # Tokenizer backed by the HuggingFace all-mpnet-base-v2 sentence-embedding
    # model. Loads the serialized tokenizer definition from the plugin's
    # bundled JSON file and memoizes it at the class level.
    class AllMpnetBaseV2Tokenizer < BasicTokenizer
      # Returns the memoized Tokenizers instance for all-mpnet-base-v2.
      #
      # NOTE(review): switched the cache from a class variable (@@tokenizer)
      # to a class-level instance variable (@tokenizer). Class variables are
      # shared across the whole inheritance tree, so if BasicTokenizer or a
      # sibling subclass referenced the same @@tokenizer, their caches could
      # clobber each other; a class instance variable is scoped to this class
      # only. First call loads from disk; subsequent calls return the cache.
      def self.tokenizer
        @tokenizer ||=
          Tokenizers.from_file("./plugins/discourse-ai/tokenizers/all-mpnet-base-v2.json")
      end
    end
  end
end