discourse-ai/spec/fabricators/llm_model_fabricator.rb

# frozen_string_literal: true
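
# Fabricators for LlmModel records used across the DiscourseAi specs.
# The base :llm_model fabricator describes an OpenAI-backed model; the
# fabricators that follow derive from it and override provider-specific
# attributes (name, provider, tokenizer, url, max_prompt_tokens).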
Fabricator(:llm_model) do
display_name "A good model"
name "gpt-4-turbo"
provider "open_ai"
tokenizer "DiscourseAi::Tokenizer::OpenAiTokenizer"
api_key "123"
url "https://api.openai.com/v1/chat/completions"
max_prompt_tokens 131_072
end

Fabricator(:anthropic_model, from: :llm_model) do
display_name "Claude 3 Opus"
name "claude-3-opus"
max_prompt_tokens 200_000
url "https://api.anthropic.com/v1/messages"
tokenizer "DiscourseAi::Tokenizer::AnthropicTokenizer"
provider "anthropic"
end

Fabricator(:hf_model, from: :llm_model) do
display_name "Llama 3.1"
name "meta-llama/Meta-Llama-3.1-70B-Instruct"
max_prompt_tokens 64_000
tokenizer "DiscourseAi::Tokenizer::Llama3Tokenizer"
url "https://test.dev/v1/chat/completions"
provider "hugging_face"
end

Fabricator(:vllm_model, from: :llm_model) do
display_name "Llama 3.1 vLLM"
name "meta-llama/Meta-Llama-3.1-70B-Instruct"
max_prompt_tokens 64_000
tokenizer "DiscourseAi::Tokenizer::Llama3Tokenizer"
url "https://test.dev/v1/chat/completions"
provider "vllm"
end

Fabricator(:fake_model, from: :llm_model) do
display_name "Fake model"
name "fake"
provider "fake"
tokenizer "DiscourseAi::Tokenizer::OpenAiTokenizer"
max_prompt_tokens 32_000
api_key "fake"
url "https://fake.test/"
end

Fabricator(:gemini_model, from: :llm_model) do
display_name "Gemini"
name "gemini-1.5-pro"
provider "google"
tokenizer "DiscourseAi::Tokenizer::OpenAiTokenizer"
max_prompt_tokens 800_000
url "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-pro-latest"
end

Fabricator(:bedrock_model, from: :anthropic_model) do
url ""
provider "aws_bedrock"
api_key "asd-asd-asd"
name "claude-3-sonnet"
provider_params { { region: "us-east-1", access_key_id: "123456" } }
end

Fabricator(:cohere_model, from: :llm_model) do
display_name "Cohere Command R+"
name "command-r-plus"
provider "cohere"
api_key "ABC"
url "https://api.cohere.ai/v1/chat"
end

Fabricator(:samba_nova_model, from: :llm_model) do
display_name "Samba Nova"
name "samba-nova"
provider "samba_nova"
api_key "ABC"
url "https://api.sambanova.ai/v1/chat/completions"
end

Fabricator(:ollama_model, from: :llm_model) do
display_name "Ollama llama 3.1"
name "llama-3.1"
provider "ollama"
api_key "ABC"
tokenizer "DiscourseAi::Tokenizer::Llama3Tokenizer"
url "http://api.ollama.ai/api/chat"
provider_params { { enable_native_tool: true } }
end

Fabricator(:mistral_model, from: :llm_model) do
display_name "Mistral Large"
name "mistral-large-latest"
provider "mistral"
api_key "ABC"
tokenizer "DiscourseAi::Tokenizer::MixtralTokenizer"
url "https://api.mistral.ai/v1/chat/completions"
provider_params { { disable_native_tools: false } }
end

Fabricator(:seeded_model, from: :llm_model) do
id "-2"
display_name "CDCK Hosted Model"
name "cdck-hosted"
provider "fake"
api_key "DSC"
tokenizer "DiscourseAi::Tokenizer::OpenAiTokenizer"
url "https://cdck.test/"
enabled_chat_bot true
end
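
# Illustrative usage sketch (an assumption about typical spec usage, not
# something defined in this file): specs build records with Fabricate and
# override attributes as needed, e.g.
#
#   model = Fabricate(:anthropic_model, api_key: "secret")
#   model.provider          # => "anthropic"
#   model.max_prompt_tokens # => 200_000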