Mirror of https://github.com/discourse/discourse-ai.git (synced 2025-07-01 12:02:16 +00:00) — 12 lines, 256 B, Ruby.
# frozen_string_literal: true
module DiscourseAi
  module Tokenizer
    # Tokenizer for Qwen-family models, backed by the bundled qwen3.json
    # vocabulary/merges file.
    class QwenTokenizer < BasicTokenizer
      # Lazily loads and memoizes the underlying tokenizer.
      #
      # NOTE(review): switched from a class variable (@@tokenizer) to a
      # class-level instance variable. @@tokenizer is shared across the whole
      # BasicTokenizer inheritance tree, so sibling tokenizer subclasses that
      # memoize the same @@variable would overwrite each other and return the
      # wrong tokenizer. @tokenizer is scoped to this class only.
      #
      # @return [Tokenizers::Tokenizer] tokenizer loaded from the plugin's
      #   qwen3.json (relative to the Discourse root working directory)
      def self.tokenizer
        @tokenizer ||= Tokenizers.from_file("./plugins/discourse-ai/tokenizers/qwen3.json")
      end
    end
  end
end