FEATURE: AI artifacts (#898)
This is a significant PR that introduces AI Artifacts functionality to the discourse-ai plugin, along with several other improvements. Key changes:

1. AI Artifacts System:
   - Adds a new `AiArtifact` model and database migration
   - Allows creation of web artifacts with HTML, CSS, and JavaScript content
   - Introduces security settings (`strict`, `lax`, `disabled`) for controlling artifact execution
   - Implements artifact rendering in iframes with sandbox protection
   - New `CreateArtifact` tool for AI to generate interactive content

2. Tool System Improvements:
   - Adds support for partial tool calls, allowing incremental updates during generation
   - Better handling of tool call states and progress tracking
   - Improved XML tool processing with CDATA support
   - Fixes for tool parameter handling and duplicate invocations

3. LLM Provider Updates:
   - Updates for Anthropic Claude models with correct token limits
   - Adds support for native/XML tool modes in Gemini integration
   - Adds new model configurations, including Llama 3.1 models
   - Improvements to streaming response handling

4. UI Enhancements:
   - New artifact viewer component with expand/collapse functionality
   - Security controls for artifact execution (click-to-run in strict mode)
   - Improved dialog and response handling
   - Better error management for tool execution

5. Security Improvements:
   - Sandbox controls for artifact execution
   - Public/private artifact sharing controls
   - Security settings to control artifact behavior
   - CSP and frame-options handling for artifacts

6. Technical Improvements:
   - Better post streaming implementation
   - Improved error handling in completions
   - Better memory management for partial tool calls
   - Enhanced testing coverage

7. Configuration:
   - New site settings for artifact security
   - Extended LLM model configurations
   - Additional tool configuration options

This PR significantly enhances the plugin's capabilities for generating and displaying interactive content while maintaining security and providing flexible configuration options for administrators. A rough sketch of the end-to-end flow follows below.
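To make the moving pieces concrete, here is a hedged Ruby sketch of the flow described above. It is not code from this commit: `bot_user` and `post` stand in for objects available in the bot context, and the artifact content values are invented; the class and method names come from the files in this diff.

```ruby
# The AI calls the create_artifact tool, which persists an AiArtifact row
# tied to the current post (columns per the migration in this PR).
artifact =
  AiArtifact.create!(
    user_id: bot_user.id,                 # bot_user/post: assumed to be in scope
    post_id: post.id,
    name: "Countdown timer",              # illustrative values
    html: "<div id='timer'></div>",
    css: "#timer { font-size: 2em; }",
    js: "console.log('tick');",
  )

# The bot reply embeds a placeholder div; the client initializer decorates it
# with the AiArtifact glimmer component.
placeholder = "<div class=\"ai-artifact\" data-ai-artifact-id=#{artifact.id}></div>"

# When a conversation is shared (lax/strict security), the placeholder is
# swapped for a sandboxed iframe served by ArtifactsController#show.
puts AiArtifact.iframe_for(artifact.id)
# => <iframe sandbox="allow-scripts allow-forms" ... src='.../discourse-ai/ai-bot/artifacts/<id>' ...>
```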
This commit is contained in:
parent 4fb686a548
commit 0d7f353284
@ -0,0 +1,59 @@
# frozen_string_literal: true

module DiscourseAi
  module AiBot
    class ArtifactsController < ApplicationController
      requires_plugin DiscourseAi::PLUGIN_NAME

      before_action :require_site_settings!

      skip_before_action :preload_json, :check_xhr, only: %i[show]

      def show
        artifact = AiArtifact.find(params[:id])

        post = Post.find_by(id: artifact.post_id)
        if artifact.metadata&.dig("public")
          # no guardian needed
        else
          raise Discourse::NotFound if !post&.topic&.private_message?
          raise Discourse::NotFound if !guardian.can_see?(post)
        end

        # Prepare the HTML document
        html = <<~HTML
          <!DOCTYPE html>
          <html>
          <head>
            <meta charset="UTF-8">
            <title>#{ERB::Util.html_escape(artifact.name)}</title>
            <style>
              #{artifact.css}
            </style>
          </head>
          <body>
            #{artifact.html}
            <script>
              #{artifact.js}
            </script>
          </body>
          </html>
        HTML

        response.headers.delete("X-Frame-Options")
        response.headers.delete("Content-Security-Policy")

        # Render the content
        render html: html.html_safe, layout: false, content_type: "text/html"
      end

      private

      def require_site_settings!
        if !SiteSetting.discourse_ai_enabled ||
             !SiteSetting.ai_artifact_security.in?(%w[lax strict])
          raise Discourse::NotFound
        end
      end
    end
  end
end
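A hedged request-spec style sketch of the access rules the controller above implements. It is not part of this commit; the `:ai_artifact` fabricator and the usual Discourse spec setup are assumptions.

```ruby
# Sketch only: exercises ArtifactsController#show under the rules shown above.
RSpec.describe DiscourseAi::AiBot::ArtifactsController do
  fab!(:artifact) { Fabricate(:ai_artifact) } # assumes a fabricator exists

  it "returns 404 when the artifact system is disabled" do
    SiteSetting.ai_artifact_security = "disabled"
    get "/discourse-ai/ai-bot/artifacts/#{artifact.id}"
    expect(response.status).to eq(404)
  end

  it "serves artifacts flagged public without sign-in" do
    SiteSetting.ai_artifact_security = "lax"
    artifact.update!(metadata: { public: true })
    get "/discourse-ai/ai-bot/artifacts/#{artifact.id}"
    expect(response.status).to eq(200)
  end
end
```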
@ -0,0 +1,49 @@
# frozen_string_literal: true

class AiArtifact < ActiveRecord::Base
  belongs_to :user
  belongs_to :post
  validates :html, length: { maximum: 65_535 }
  validates :css, length: { maximum: 65_535 }
  validates :js, length: { maximum: 65_535 }

  def self.iframe_for(id)
    <<~HTML
      <iframe sandbox="allow-scripts allow-forms" height="600px" src='#{url(id)}' frameborder="0" width="100%"></iframe>
    HTML
  end

  def self.url(id)
    Discourse.base_url + "/discourse-ai/ai-bot/artifacts/#{id}"
  end

  def self.share_publicly(id:, post:)
    artifact = AiArtifact.find_by(id: id)
    artifact.update!(metadata: { public: true }) if artifact&.post&.topic&.id == post.topic.id
  end

  def self.unshare_publicly(id:)
    artifact = AiArtifact.find_by(id: id)
    artifact&.update!(metadata: { public: false })
  end

  def url
    self.class.url(id)
  end
end

# == Schema Information
#
# Table name: ai_artifacts
#
#  id         :bigint           not null, primary key
#  user_id    :integer          not null
#  post_id    :integer          not null
#  name       :string(255)      not null
#  html       :string(65535)
#  css        :string(65535)
#  js         :string(65535)
#  metadata   :jsonb
#  created_at :datetime         not null
#  updated_at :datetime         not null
#
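For reference, a console-style sketch of the model API defined above. The id is arbitrary and `post` is a placeholder for a Post in the same topic as the artifact.

```ruby
artifact = AiArtifact.find(42) # 42 is an example id

AiArtifact.url(artifact.id)
# => "https://example.com/discourse-ai/ai-bot/artifacts/42"

# Only marks the artifact public if it belongs to the same topic as `post`:
AiArtifact.share_publicly(id: artifact.id, post: post)

# Sandboxed embed markup used when conversations are shared:
AiArtifact.iframe_for(artifact.id)
# => "<iframe sandbox=\"allow-scripts allow-forms\" height=\"600px\" src='...' ...></iframe>"
```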
@ -26,6 +26,13 @@ class LlmModel < ActiveRecord::Base
       },
       open_ai: {
         organization: :text,
+        disable_native_tools: :checkbox,
+      },
+      google: {
+        disable_native_tools: :checkbox,
+      },
+      azure: {
+        disable_native_tools: :checkbox,
       },
       hugging_face: {
         disable_system_prompt: :checkbox,

@ -34,6 +34,12 @@ class SharedAiConversation < ActiveRecord::Base
   def self.destroy_conversation(conversation)
     conversation.destroy
 
+    maybe_topic = conversation.target
+    if maybe_topic.is_a?(Topic)
+      AiArtifact.where(post: maybe_topic.posts).update_all(metadata: { public: false })
+    end
+
     ::Jobs.enqueue(
       :shared_conversation_adjust_upload_security,
       target_id: conversation.target_id,

@ -165,7 +171,7 @@ class SharedAiConversation < ActiveRecord::Base
         id: post.id,
         user_id: post.user_id,
         created_at: post.created_at,
-        cooked: post.cooked,
+        cooked: cook_artifacts(post),
       }
 
     mapped[:persona] = persona if ai_bot_participant&.id == post.user_id
@ -175,6 +181,24 @@ class SharedAiConversation < ActiveRecord::Base
     }
   end
 
+  def self.cook_artifacts(post)
+    html = post.cooked
+    return html if !%w[lax strict].include?(SiteSetting.ai_artifact_security)
+
+    doc = Nokogiri::HTML5.fragment(html)
+    doc
+      .css("div.ai-artifact")
+      .each do |node|
+        id = node["data-ai-artifact-id"].to_i
+        if id > 0
+          AiArtifact.share_publicly(id: id, post: post)
+          node.replace(AiArtifact.iframe_for(id))
+        end
+      end
+
+    doc.to_s
+  end
+
   private
 
   def populate_user_info!(posts)
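Roughly, `cook_artifacts` performs this transformation when a conversation is shared. A sketch; `post` is assumed to be a Post whose cooked HTML contains the placeholder emitted by the CreateArtifact tool.

```ruby
# Given a post whose cooked HTML contains:
#   <div class="ai-artifact" data-ai-artifact-id=42></div>
# and ai_artifact_security set to "lax" or "strict", artifact 42 is marked
# public and the div is swapped for the sandboxed iframe; otherwise the
# cooked HTML is returned unchanged.
SharedAiConversation.cook_artifacts(post)
# => "... <iframe sandbox=\"allow-scripts allow-forms\" src='.../discourse-ai/ai-bot/artifacts/42' ...></iframe> ..."
```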
@ -0,0 +1,122 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import { inject as service } from "@ember/service";
import DButton from "discourse/components/d-button";
import htmlClass from "discourse/helpers/html-class";
import getURL from "discourse-common/lib/get-url";

export default class AiArtifactComponent extends Component {
  @service siteSettings;
  @tracked expanded = false;
  @tracked showingArtifact = false;

  constructor() {
    super(...arguments);
    this.keydownHandler = this.handleKeydown.bind(this);
  }

  willDestroy() {
    super.willDestroy(...arguments);
    window.removeEventListener("keydown", this.keydownHandler);
  }

  @action
  handleKeydown(event) {
    if (event.key === "Escape" || event.key === "Esc") {
      this.expanded = false;
    }
  }

  get requireClickToRun() {
    if (this.showingArtifact) {
      return false;
    }
    return this.siteSettings.ai_artifact_security === "strict";
  }

  get artifactUrl() {
    return getURL(`/discourse-ai/ai-bot/artifacts/${this.args.artifactId}`);
  }

  @action
  showArtifact() {
    this.showingArtifact = true;
  }

  @action
  toggleView() {
    this.expanded = !this.expanded;
    if (this.expanded) {
      window.addEventListener("keydown", this.keydownHandler);
    } else {
      window.removeEventListener("keydown", this.keydownHandler);
    }
  }

  get wrapperClasses() {
    return `ai-artifact__wrapper ${
      this.expanded ? "ai-artifact__expanded" : ""
    }`;
  }

  @action
  artifactPanelHover() {
    // re-trigger the panel slide-up animation
    const panel = document.querySelector(".ai-artifact__panel");
    panel.style.animation = "none"; // stop the animation
    setTimeout(() => {
      panel.style.animation = ""; // re-trigger the animation by clearing the inline style
    }, 0);
  }

  <template>
    {{#if this.expanded}}
      {{htmlClass "ai-artifact-expanded"}}
    {{/if}}
    <div class={{this.wrapperClasses}}>
      <div
        class="ai-artifact__panel--wrapper"
        {{on "mouseleave" this.artifactPanelHover}}
      >
        <div class="ai-artifact__panel">
          <DButton
            class="btn-flat btn-icon-text"
            @icon="discourse-compress"
            @label="discourse_ai.ai_artifact.collapse_view_label"
            @action={{this.toggleView}}
          />
        </div>
      </div>
      {{#if this.requireClickToRun}}
        <div class="ai-artifact__click-to-run">
          <DButton
            class="btn btn-primary"
            @icon="play"
            @label="discourse_ai.ai_artifact.click_to_run_label"
            @action={{this.showArtifact}}
          />
        </div>
      {{else}}
        <iframe
          title="AI Artifact"
          src={{this.artifactUrl}}
          width="100%"
          frameborder="0"
          sandbox="allow-scripts allow-forms"
        ></iframe>
      {{/if}}
      {{#unless this.requireClickToRun}}
        <div class="ai-artifact__footer">
          <DButton
            class="btn-flat btn-icon-text ai-artifact__expand-button"
            @icon="discourse-expand"
            @label="discourse_ai.ai_artifact.expand_view_label"
            @action={{this.toggleView}}
          />
        </div>
      {{/unless}}
    </div>
  </template>
}
@ -0,0 +1,35 @@
import { withPluginApi } from "discourse/lib/plugin-api";
import AiArtifact from "../discourse/components/ai-artifact";

function initializeAiArtifacts(api) {
  api.decorateCookedElement(
    (element, helper) => {
      if (!helper.renderGlimmer) {
        return;
      }

      [...element.querySelectorAll("div.ai-artifact")].forEach(
        (artifactElement) => {
          const artifactId = artifactElement.getAttribute(
            "data-ai-artifact-id"
          );

          helper.renderGlimmer(artifactElement, <template>
            <AiArtifact @artifactId={{artifactId}} />
          </template>);
        }
      );
    },
    {
      id: "ai-artifact",
      onlyStream: true,
    }
  );
}

export default {
  name: "ai-artifact",
  initialize() {
    withPluginApi("0.8.7", initializeAiArtifacts);
  },
};

@ -1,3 +1,4 @@
 export function setup(helper) {
   helper.allowList(["details[class=ai-quote]"]);
+  helper.allowList(["div[class=ai-artifact]", "div[data-ai-artifact-id]"]);
 }

@ -0,0 +1,106 @@
.ai-artifact__wrapper {
  iframe {
    width: 100%;
    height: calc(100% - 2em);
  }
  height: 500px;
  padding-bottom: 2em;
}

.ai-artifact__click-to-run {
  display: flex;
  justify-content: center;
  align-items: center;
  height: 100%;
}

.ai-artifact__panel {
  display: none;
}

html.ai-artifact-expanded {
  overflow: hidden;
}

.ai-artifact__footer {
  display: flex;
  justify-content: space-between;
  align-items: center;
  .ai-artifact__expand-button {
    margin-left: auto;
  }
}

.ai-artifact__expanded {
  .ai-artifact__footer {
    display: none;
  }

  .ai-artifact__panel--wrapper {
    display: block;
    position: fixed;
    top: 0;
    left: 0;
    right: 0;
    height: 4em;
    z-index: 1000000;
    &:hover {
      .ai-artifact__panel {
        transform: translateY(0) !important;
        animation: none;
      }
    }
  }

  .ai-artifact__panel {
    display: block;
    position: fixed;
    top: 0;
    left: 0;
    right: 0;
    height: 2em;
    transition: transform 0.5s ease-in-out;
    animation: slideUp 0.5s 3s forwards;
    background-color: var(--secondary-low);
    opacity: 0.9;
    transform: translateY(0);
    button {
      width: 100%;
      text-align: left;
      box-sizing: border-box;
      justify-content: flex-start;
      color: var(--secondary-very-high);
      &:hover {
        color: var(--secondary-very-high);
        .d-icon {
          color: var(--secondary-high);
        }
        //color: var(--secondary-vary-low);
      }
    }
  }
  @keyframes slideUp {
    to {
      transform: translateY(-100%);
    }
  }

  iframe {
    position: fixed;
    top: 0;
    height: 100%;
    max-height: 100%;
    left: 0;
    right: 0;
    bottom: 0;
    z-index: z("fullscreen");
  }

  position: fixed;
  top: 0;
  left: 0;
  height: 100%;
  width: 100%;
  z-index: z("fullscreen");
  background-color: var(--secondary);
}
@ -165,7 +165,7 @@ en:
       saved: "Persona saved"
       enabled: "Enabled?"
       tools: "Enabled tools"
-      forced_tools: "Forced fools"
+      forced_tools: "Forced tools"
       allowed_groups: "Allowed groups"
       confirm_delete: "Are you sure you want to delete this persona?"
       new: "New persona"
@ -279,14 +279,17 @@ en:
     model_description:
       none: "General settings that work for most language models"
       anthropic-claude-3-5-sonnet: "Anthropic's most intelligent model"
+      anthropic-claude-3-5-haiku: "Fast and cost-effective"
       anthropic-claude-3-opus: "Excels at writing and complex tasks"
-      anthropic-claude-3-sonnet: "Balance of speed and intelligence"
-      anthropic-claude-3-haiku: "Fast and cost-effective"
       google-gemini-1-5-pro: "Mid-sized multimodal model capable of a wide range of tasks"
       google-gemini-1-5-flash: "Lightweight, fast, and cost-efficient with multimodal reasoning"
       open_ai-gpt-4-turbo: "Previous generation high-intelligence model"
       open_ai-gpt-4o: "High intelligence model for complex, multi-step tasks"
       open_ai-gpt-4o-mini: "Affordable and fast small model for lightweight tasks"
+      open_ai-o1-mini: "Cost-efficient reasoning model"
+      open_ai-o1-preview: "Open AI's most capable reasoning model"
+      samba_nova-Meta-Llama-3-1-8B-Instruct: "Efficient lightweight multilingual model"
+      samba_nova-Meta-Llama-3-1-70B-Instruct: "Powerful multipurpose model"
 
     configured:
       title: "Configured LLMs"
@ -399,6 +402,11 @@ en:
     quick_search:
       suffix: "in all topics and posts with AI"
 
+    ai_artifact:
+      expand_view_label: "Expand view"
+      collapse_view_label: "Exit Fullscreen (ESC)"
+      click_to_run_label: "Run Artifact"
+
     ai_bot:
       pm_warning: "AI chatbot messages are monitored regularly by moderators."
       cancel_streaming: "Stop reply"

@ -17,6 +17,7 @@ en:
     description: "Periodic report based on a large language model"
   site_settings:
     discourse_ai_enabled: "Enable the discourse AI plugin."
+    ai_artifact_security: "The AI artifact system generates IFRAMEs with runnable code. Strict mode disables sharing and forces an extra click to run code. Lax mode allows sharing of artifacts and runs code directly. Disabled mode disables the artifact system."
     ai_toxicity_enabled: "Enable the toxicity module."
     ai_toxicity_inference_service_api_endpoint: "URL where the API is running for the toxicity module"
     ai_toxicity_inference_service_api_key: "API key for the toxicity API"
@ -79,7 +80,7 @@ en:
     ai_embeddings_semantic_related_include_closed_topics: "Include closed topics in semantic search results"
     ai_embeddings_semantic_search_hyde_model: "Model used to expand keywords to get better results during a semantic search"
     ai_embeddings_per_post_enabled: Generate embeddings for each post
 
     ai_summarization_enabled: "Enable the topic summarization module."
     ai_summarization_model: "Model to use for summarization."
     ai_custom_summarization_allowed_groups: "Groups allowed to create new summaries."
@ -199,6 +200,9 @@ en:
       discourse_helper:
         name: "Discourse Helper"
         description: "AI Bot specialized in helping with Discourse related tasks"
+      web_artifact_creator:
+        name: "Web Artifact Creator"
+        description: "AI Bot specialized in creating interactive web artifacts"
       topic_not_found: "Summary unavailable, topic not found!"
       summarizing: "Summarizing topic"
       searching: "Searching for: '%{query}'"

@ -222,6 +226,7 @@ en:
       name: "Base Search Query"
       description: "Base query to use when searching. Example: '#urgent' will prepend '#urgent' to the search query and only include topics with the urgent category or tag."
     tool_summary:
+      create_artifact: "Create web artifact"
       web_browser: "Browse Web"
       github_search_files: "GitHub search files"
       github_search_code: "GitHub code search"

@ -243,6 +248,7 @@ en:
       search_meta_discourse: "Search Meta Discourse"
       javascript_evaluator: "Evaluate JavaScript"
     tool_help:
+      create_artifact: "Create a web artifact using the AI Bot"
       web_browser: "Browse web page using the AI Bot"
       github_search_code: "Search for code in a GitHub repository"
       github_search_files: "Search for files in a GitHub repository"

@ -264,6 +270,7 @@ en:
       search_meta_discourse: "Search Meta Discourse"
       javascript_evaluator: "Evaluate JavaScript"
     tool_description:
+      create_artifact: "Created a web artifact using the AI Bot"
       web_browser: "Reading <a href='%{url}'>%{url}</a>"
       github_search_files: "Searched for '%{keywords}' in %{repo}/%{branch}"
       github_search_code: "Searched for '%{query}' in %{repo}"

@ -33,6 +33,10 @@ DiscourseAi::Engine.routes.draw do
     get "/preview/:topic_id" => "shared_ai_conversations#preview"
   end
 
+  scope module: :ai_bot, path: "/ai-bot/artifacts" do
+    get "/:id" => "artifacts#show"
+  end
+
   scope module: :summarization, path: "/summarization", defaults: { format: :json } do
     get "/t/:topic_id" => "summary#show", :constraints => { topic_id: /\d+/ }
     get "/channels/:channel_id" => "chat_summary#show"
@ -2,7 +2,14 @@ discourse_ai:
   discourse_ai_enabled:
     default: true
     client: true
+  ai_artifact_security:
+    client: true
+    type: enum
+    default: "strict"
+    choices:
+      - "disabled"
+      - "lax"
+      - "strict"
   ai_toxicity_enabled:
     default: false
     client: true
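The enum above is what the Ruby side keys off throughout this diff; the check is simply the following (a one-line sketch of the pattern used in the controller and persona code):

```ruby
# "disabled" switches the feature off; "lax" and "strict" both enable it,
# with "strict" additionally requiring a click-to-run on the client.
artifacts_enabled = SiteSetting.ai_artifact_security.in?(%w[lax strict])
```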
@ -5,7 +5,12 @@ DiscourseAi::AiBot::Personas::Persona.system_personas.each do |persona_class, id|
   if !persona
     persona = AiPersona.new
     persona.id = id
-    persona.allowed_group_ids = [Group::AUTO_GROUPS[:trust_level_0]]
+    if persona_class == DiscourseAi::AiBot::Personas::WebArtifactCreator
+      # this is somewhat sensitive, so we default it to staff
+      persona.allowed_group_ids = [Group::AUTO_GROUPS[:staff]]
+    else
+      persona.allowed_group_ids = [Group::AUTO_GROUPS[:trust_level_0]]
+    end
     persona.enabled = true
     persona.priority = true if persona_class == DiscourseAi::AiBot::Personas::General
   end

@ -0,0 +1,16 @@
# frozen_string_literal: true
class AddAiArtifacts < ActiveRecord::Migration[7.1]
  def change
    create_table :ai_artifacts do |t|
      t.integer :user_id, null: false
      t.integer :post_id, null: false
      t.string :name, null: false, limit: 255
      t.string :html, limit: 65_535 # ~64KB limit
      t.string :css, limit: 65_535 # ~64KB limit
      t.string :js, limit: 65_535 # ~64KB limit
      t.jsonb :metadata # for any additional properties

      t.timestamps
    end
  end
end
@ -106,12 +106,39 @@ module DiscourseAi
           tool_found = false
           force_tool_if_needed(prompt, context)
 
+          tool_halted = false
+
+          allow_partial_tool_calls = persona.allow_partial_tool_calls?
+          existing_tools = Set.new
+
           result =
-            llm.generate(prompt, feature_name: "bot", **llm_kwargs) do |partial, cancel|
-              tool = persona.find_tool(partial, bot_user: user, llm: llm, context: context)
+            llm.generate(
+              prompt,
+              feature_name: "bot",
+              partial_tool_calls: allow_partial_tool_calls,
+              **llm_kwargs,
+            ) do |partial, cancel|
+              tool =
+                persona.find_tool(
+                  partial,
+                  bot_user: user,
+                  llm: llm,
+                  context: context,
+                  existing_tools: existing_tools,
+                )
               tool = nil if tools_ran >= MAX_TOOLS
 
               if tool.present?
+                existing_tools << tool
+                tool_call = partial
+                if tool_call.partial?
+                  if tool.class.allow_partial_tool_calls?
+                    tool.partial_invoke
+                    update_blk.call("", cancel, tool.custom_raw, :partial_tool)
+                  end
+                  next
+                end
+
                 tool_found = true
                 # a bit hacky, but extra newlines do no harm
                 if needs_newlines

@ -122,7 +149,10 @@ module DiscourseAi
                 process_tool(tool, raw_context, llm, cancel, update_blk, prompt, context)
                 tools_ran += 1
                 ongoing_chain &&= tool.chain_next_response?
+
+                tool_halted = true if !tool.chain_next_response?
               else
+                next if tool_halted
                 needs_newlines = true
                 if partial.is_a?(DiscourseAi::Completions::ToolCall)
                   Rails.logger.warn("DiscourseAi: Tool not found: #{partial.name}")

@ -185,7 +215,7 @@ module DiscourseAi
       end
 
       def invoke_tool(tool, llm, cancel, context, &update_blk)
-        show_placeholder = !context[:skip_tool_details]
+        show_placeholder = !context[:skip_tool_details] && !tool.class.allow_partial_tool_calls?
 
         update_blk.call("", cancel, build_placeholder(tool.summary, "")) if show_placeholder
 

@ -44,6 +44,7 @@ module DiscourseAi
           Personas::DallE3 => -7,
           Personas::DiscourseHelper => -8,
           Personas::GithubHelper => -9,
+          Personas::WebArtifactCreator => -10,
         }
       end
 

@ -98,6 +99,7 @@ module DiscourseAi
          Tools::JavascriptEvaluator,
        ]
 
+        tools << Tools::CreateArtifact if SiteSetting.ai_artifact_security.in?(%w[lax strict])
        tools << Tools::GithubSearchCode if SiteSetting.ai_bot_github_access_token.present?
 
        tools << Tools::ListTags if SiteSetting.tagging_enabled

@ -199,14 +201,24 @@ module DiscourseAi
        prompt
      end
 
-      def find_tool(partial, bot_user:, llm:, context:)
+      def find_tool(partial, bot_user:, llm:, context:, existing_tools: [])
        return nil if !partial.is_a?(DiscourseAi::Completions::ToolCall)
-        tool_instance(partial, bot_user: bot_user, llm: llm, context: context)
+        tool_instance(
+          partial,
+          bot_user: bot_user,
+          llm: llm,
+          context: context,
+          existing_tools: existing_tools,
+        )
+      end
+
+      def allow_partial_tool_calls?
+        available_tools.any? { |tool| tool.allow_partial_tool_calls? }
      end
 
      protected
 
-      def tool_instance(tool_call, bot_user:, llm:, context:)
+      def tool_instance(tool_call, bot_user:, llm:, context:, existing_tools:)
        function_id = tool_call.id
        function_name = tool_call.name
        return nil if function_name.nil?

@ -240,14 +252,22 @@ module DiscourseAi
          arguments[name.to_sym] = value if value
        end
 
-        tool_klass.new(
-          arguments,
-          tool_call_id: function_id || function_name,
-          persona_options: options[tool_klass].to_h,
-          bot_user: bot_user,
-          llm: llm,
-          context: context,
-        )
+        tool_instance =
+          existing_tools.find { |t| t.name == function_name && t.tool_call_id == function_id }
+
+        if tool_instance
+          tool_instance.parameters = arguments
+          tool_instance
+        else
+          tool_klass.new(
+            arguments,
+            tool_call_id: function_id || function_name,
+            persona_options: options[tool_klass].to_h,
+            bot_user: bot_user,
+            llm: llm,
+            context: context,
+          )
+        end
      end
 
      def strip_quotes(value)
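A sketch of what the `existing_tools` bookkeeping above buys: while a tool call streams in, the same tool instance keeps receiving the updated parameters instead of a fresh instance being built per chunk. The names mirror the code in this diff; `partial_tool_call`, `next_partial`, `user`, `llm`, and `context` are placeholders for objects the bot already has in scope.

```ruby
existing_tools = Set.new

# first partial chunk of a streamed create_artifact call
tool =
  persona.find_tool(
    partial_tool_call,            # a DiscourseAi::Completions::ToolCall with partial? == true
    bot_user: user,
    llm: llm,
    context: context,
    existing_tools: existing_tools,
  )
existing_tools << tool
tool.partial_invoke if tool.class.allow_partial_tool_calls?

# a later chunk with the same tool_call_id returns the same instance,
# now carrying the merged parameters
same_tool =
  persona.find_tool(
    next_partial,
    bot_user: user,
    llm: llm,
    context: context,
    existing_tools: existing_tools,
  )
same_tool.equal?(tool) # => true
```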
@ -0,0 +1,56 @@
# frozen_string_literal: true

module DiscourseAi
  module AiBot
    module Personas
      class WebArtifactCreator < Persona
        def tools
          [Tools::CreateArtifact]
        end

        def required_tools
          [Tools::CreateArtifact]
        end

        def system_prompt
          <<~PROMPT
            You are the Web Creator, an AI assistant specializing in building interactive web components. You create engaging and functional web experiences using HTML, CSS, and JavaScript. You live in a Discourse PM and communicate using Markdown.

            Core Principles:
            - Create delightful, interactive experiences
            - Focus on visual appeal and smooth animations
            - Write clean, efficient code
            - Build progressively (HTML structure → CSS styling → JavaScript interactivity)
            - Keep components focused and purposeful

            When creating:
            1. Understand the desired user experience
            2. Break down complex interactions into simple components
            3. Use semantic HTML for strong foundations
            4. Style thoughtfully with CSS
            5. Add JavaScript for rich interactivity
            6. Consider responsive design

            Best Practices:
            - Leverage native HTML elements for better functionality
            - Use CSS transforms and transitions for smooth animations
            - Keep JavaScript modular and event-driven
            - Make content responsive and adaptive
            - Create self-contained components

            When responding:
            1. Ask clarifying questions if the request is ambiguous
            2. Briefly explain your approach
            3. Build features iteratively
            4. Describe the interactive elements
            5. Test your solution conceptually

            Your goal is to transform ideas into engaging web experiences. Be creative and practical, focusing on making interfaces that are both beautiful and functional.

            Remember: Great components combine structure (HTML), presentation (CSS), and behavior (JavaScript) to create memorable user experiences.
          PROMPT
        end
      end
    end
  end
end
|
@ -399,7 +399,7 @@ module DiscourseAi
|
||||||
PostCustomPrompt.none
|
PostCustomPrompt.none
|
||||||
|
|
||||||
reply = +""
|
reply = +""
|
||||||
start = Time.now
|
post_streamer = nil
|
||||||
|
|
||||||
post_type =
|
post_type =
|
||||||
post.post_type == Post.types[:whisper] ? Post.types[:whisper] : Post.types[:regular]
|
post.post_type == Post.types[:whisper] ? Post.types[:whisper] : Post.types[:regular]
|
||||||
|
@ -448,35 +448,35 @@ module DiscourseAi
|
||||||
|
|
||||||
context[:skip_tool_details] ||= !bot.persona.class.tool_details
|
context[:skip_tool_details] ||= !bot.persona.class.tool_details
|
||||||
|
|
||||||
|
post_streamer = PostStreamer.new(delay: Rails.env.test? ? 0 : 0.5) if stream_reply
|
||||||
|
|
||||||
new_custom_prompts =
|
new_custom_prompts =
|
||||||
bot.reply(context) do |partial, cancel, placeholder, type|
|
bot.reply(context) do |partial, cancel, placeholder, type|
|
||||||
reply << partial
|
reply << partial
|
||||||
raw = reply.dup
|
raw = reply.dup
|
||||||
raw << "\n\n" << placeholder if placeholder.present?
|
raw << "\n\n" << placeholder if placeholder.present?
|
||||||
|
|
||||||
blk.call(partial) if blk && type != :tool_details
|
blk.call(partial) if blk && type != :tool_details && type != :partial_tool
|
||||||
|
|
||||||
if stream_reply && !Discourse.redis.get(redis_stream_key)
|
if stream_reply && !Discourse.redis.get(redis_stream_key)
|
||||||
cancel&.call
|
cancel&.call
|
||||||
reply_post.update!(raw: reply, cooked: PrettyText.cook(reply))
|
reply_post.update!(raw: reply, cooked: PrettyText.cook(reply))
|
||||||
end
|
end
|
||||||
|
|
||||||
if stream_reply
|
if post_streamer
|
||||||
# Minor hack to skip the delay during tests.
|
post_streamer.run_later do
|
||||||
if placeholder.blank?
|
Discourse.redis.expire(redis_stream_key, 60)
|
||||||
next if (Time.now - start < 0.5) && !Rails.env.test?
|
publish_update(reply_post, { raw: raw })
|
||||||
start = Time.now
|
|
||||||
end
|
end
|
||||||
|
|
||||||
Discourse.redis.expire(redis_stream_key, 60)
|
|
||||||
|
|
||||||
publish_update(reply_post, { raw: raw })
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
return if reply.blank?
|
return if reply.blank?
|
||||||
|
|
||||||
if stream_reply
|
if stream_reply
|
||||||
|
post_streamer.finish
|
||||||
|
post_streamer = nil
|
||||||
|
|
||||||
# land the final message prior to saving so we don't clash
|
# land the final message prior to saving so we don't clash
|
||||||
reply_post.cooked = PrettyText.cook(reply)
|
reply_post.cooked = PrettyText.cook(reply)
|
||||||
publish_final_update(reply_post)
|
publish_final_update(reply_post)
|
||||||
|
@ -514,6 +514,7 @@ module DiscourseAi
|
||||||
|
|
||||||
reply_post
|
reply_post
|
||||||
ensure
|
ensure
|
||||||
|
post_streamer&.finish(skip_callback: true)
|
||||||
publish_final_update(reply_post) if stream_reply
|
publish_final_update(reply_post) if stream_reply
|
||||||
if reply_post && post.post_number == 1 && post.topic.private_message?
|
if reply_post && post.post_number == 1 && post.topic.private_message?
|
||||||
title_playground(reply_post)
|
title_playground(reply_post)
|
||||||
|
|
|
@ -0,0 +1,58 @@
# frozen_string_literal: true

module DiscourseAi
  module AiBot
    class PostStreamer
      def initialize(delay: 0.5)
        @mutex = Mutex.new
        @callback = nil
        @delay = delay
        @done = false
      end

      def run_later(&callback)
        @mutex.synchronize { @callback = callback }
        ensure_worker!
      end

      def finish(skip_callback: false)
        @mutex.synchronize do
          @callback&.call if skip_callback
          @callback = nil
          @done = true
        end

        begin
          @worker_thread&.wakeup
        rescue StandardError
          ThreadError
        end
        @worker_thread&.join
        @worker_thread = nil
      end

      private

      def run
        while !@done
          @mutex.synchronize do
            callback = @callback
            @callback = nil
            callback&.call
          end
          sleep @delay
        end
      end

      def ensure_worker!
        return if @worker_thread
        @mutex.synchronize do
          return if @worker_thread
          db = RailsMultisite::ConnectionManagement.current_db
          @worker_thread =
            Thread.new { RailsMultisite::ConnectionManagement.with_connection(db) { run } }
        end
      end
    end
  end
end
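Usage mirrors the playground change earlier in this diff. A sketch only; `publish_update`, `reply_post`, and `raw` come from the playground's scope.

```ruby
streamer = DiscourseAi::AiBot::PostStreamer.new(delay: 0.5)

# run_later swaps in the latest callback; the background worker invokes the
# pending callback at most once per delay window, so rapid partials collapse
streamer.run_later { publish_update(reply_post, raw: raw) }

# stop the worker thread once the reply has finished streaming
streamer.finish
```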
|
@ -0,0 +1,137 @@
|
||||||
|
# frozen_string_literal: true
|
||||||
|
|
||||||
|
module DiscourseAi
|
||||||
|
module AiBot
|
||||||
|
module Tools
|
||||||
|
class CreateArtifact < Tool
|
||||||
|
def self.name
|
||||||
|
"create_artifact"
|
||||||
|
end
|
||||||
|
|
||||||
|
def self.signature
|
||||||
|
{
|
||||||
|
name: "create_artifact",
|
||||||
|
description:
|
||||||
|
"Creates a web artifact with HTML, CSS, and JavaScript that can be displayed in an iframe",
|
||||||
|
parameters: [
|
||||||
|
{
|
||||||
|
name: "name",
|
||||||
|
description: "A name for the artifact (max 255 chars)",
|
||||||
|
type: "string",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "html_body",
|
||||||
|
description: "The HTML content for the BODY tag (do not include the BODY tag)",
|
||||||
|
type: "string",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{ name: "css", description: "Optional CSS styles for the artifact", type: "string" },
|
||||||
|
{
|
||||||
|
name: "js",
|
||||||
|
description:
|
||||||
|
"Optional
|
||||||
|
JavaScript code for the artifact",
|
||||||
|
type: "string",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
def self.allow_partial_tool_calls?
|
||||||
|
true
|
||||||
|
end
|
||||||
|
|
||||||
|
def partial_invoke
|
||||||
|
@selected_tab = :html_body
|
||||||
|
if @prev_parameters
|
||||||
|
@selected_tab = parameters.keys.find { |k| @prev_parameters[k] != parameters[k] }
|
||||||
|
end
|
||||||
|
update_custom_html
|
||||||
|
@prev_parameters = parameters.dup
|
||||||
|
end
|
||||||
|
|
||||||
|
def invoke
|
||||||
|
yield parameters[:name] || "Web Artifact"
|
||||||
|
# Get the current post from context
|
||||||
|
post = Post.find_by(id: context[:post_id])
|
||||||
|
return error_response("No post context found") unless post
|
||||||
|
|
||||||
|
html = parameters[:html_body].to_s
|
||||||
|
css = parameters[:css].to_s
|
||||||
|
js = parameters[:js].to_s
|
||||||
|
|
||||||
|
# Create the artifact
|
||||||
|
artifact =
|
||||||
|
AiArtifact.new(
|
||||||
|
user_id: bot_user.id,
|
||||||
|
post_id: post.id,
|
||||||
|
name: parameters[:name].to_s[0...255],
|
||||||
|
html: html,
|
||||||
|
css: css,
|
||||||
|
js: js,
|
||||||
|
metadata: parameters[:metadata],
|
||||||
|
)
|
||||||
|
|
||||||
|
if artifact.save
|
||||||
|
update_custom_html(artifact)
|
||||||
|
success_response(artifact)
|
||||||
|
else
|
||||||
|
error_response(artifact.errors.full_messages.join(", "))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def chain_next_response?
|
||||||
|
@chain_next_response
|
||||||
|
end
|
||||||
|
|
||||||
|
private
|
||||||
|
|
||||||
|
def update_custom_html(artifact = nil)
|
||||||
|
html = parameters[:html_body].to_s
|
||||||
|
css = parameters[:css].to_s
|
||||||
|
js = parameters[:js].to_s
|
||||||
|
|
||||||
|
artifact_div =
|
||||||
|
"<div class=\"ai-artifact\" data-ai-artifact-id=#{artifact.id}></div>" if artifact
|
||||||
|
|
||||||
|
content = []
|
||||||
|
|
||||||
|
content << [:html_body, "### HTML\n\n```html\n#{html}\n```"] if html.present?
|
||||||
|
|
||||||
|
content << [:css, "### CSS\n\n```css\n#{css}\n```"] if css.present?
|
||||||
|
|
||||||
|
content << [:js, "### JavaScript\n\n```javascript\n#{js}\n```"] if js.present?
|
||||||
|
|
||||||
|
content << [:preview, "### Preview\n\n#{artifact_div}"] if artifact_div
|
||||||
|
|
||||||
|
content.sort_by! { |c| c[0] === @selected_tab ? 1 : 0 } if !artifact
|
||||||
|
|
||||||
|
self.custom_raw = content.map { |c| c[1] }.join("\n\n")
|
||||||
|
end
|
||||||
|
|
||||||
|
def success_response(artifact)
|
||||||
|
@chain_next_response = false
|
||||||
|
|
||||||
|
{
|
||||||
|
status: "success",
|
||||||
|
artifact_id: artifact.id,
|
||||||
|
message: "Artifact created successfully and rendered to user.",
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
def error_response(message)
|
||||||
|
@chain_next_response = false
|
||||||
|
|
||||||
|
{ status: "error", error: message }
|
||||||
|
end
|
||||||
|
|
||||||
|
def help
|
||||||
|
"Creates a web artifact with HTML, CSS, and JavaScript that can be displayed in an iframe. " \
|
||||||
|
"Requires a name and HTML content. CSS and JavaScript are optional. " \
|
||||||
|
"The artifact will be associated with the current post and can be displayed using an iframe."
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
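For context, a hedged sketch of what a `create_artifact` call carrying the parameters defined above might look like once parsed. All values are illustrative, not from the commit.

```ruby
# What the LLM sends (as parsed tool-call parameters) for the signature above.
tool_call_params = {
  name: "create_artifact",
  parameters: {
    name: "Color picker",
    html_body: "<input type='color' id='picker'>",
    css: "#picker { width: 4em; }",
    js: "document.getElementById('picker').addEventListener('input', (e) => console.log(e.target.value));",
  },
}
# invoke then saves an AiArtifact and sets custom_raw to markdown sections
# (### HTML / ### CSS / ### JavaScript / ### Preview) that render in the post.
```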
|
@ -38,10 +38,14 @@ module DiscourseAi
|
||||||
def custom_system_message
|
def custom_system_message
|
||||||
nil
|
nil
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def allow_partial_tool_calls?
|
||||||
|
false
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
attr_accessor :custom_raw
|
attr_accessor :custom_raw, :parameters
|
||||||
attr_reader :tool_call_id, :persona_options, :bot_user, :llm, :context, :parameters
|
attr_reader :tool_call_id, :persona_options, :bot_user, :llm, :context
|
||||||
|
|
||||||
def initialize(
|
def initialize(
|
||||||
parameters,
|
parameters,
|
||||||
|
|
|
@ -35,6 +35,8 @@ class DiscourseAi::Completions::AnthropicMessageProcessor
|
||||||
|
|
||||||
def to_tool_call
|
def to_tool_call
|
||||||
parameters = JSON.parse(raw_json, symbolize_names: true)
|
parameters = JSON.parse(raw_json, symbolize_names: true)
|
||||||
|
# we dupe to avoid poisoning the original tool call
|
||||||
|
@tool_call = @tool_call.dup
|
||||||
@tool_call.partial = false
|
@tool_call.partial = false
|
||||||
@tool_call.parameters = parameters
|
@tool_call.parameters = parameters
|
||||||
@tool_call
|
@tool_call
|
||||||
|
|
|
@ -44,17 +44,31 @@ module DiscourseAi
         llm_model.provider == "open_ai" && llm_model.name.include?("o1-")
       end
 
+      def disable_native_tools?
+        return @disable_native_tools if defined?(@disable_native_tools)
+        @disable_native_tools = !!llm_model.lookup_custom_param("disable_native_tools")
+      end
+
       private
 
       def tools_dialect
-        @tools_dialect ||= DiscourseAi::Completions::Dialects::OpenAiTools.new(prompt.tools)
+        if disable_native_tools?
+          super
+        else
+          @tools_dialect ||= DiscourseAi::Completions::Dialects::OpenAiTools.new(prompt.tools)
+        end
       end
 
       def system_msg(msg)
+        content = msg[:content]
+        if disable_native_tools? && tools_dialect.instructions.present?
+          content = content + "\n\n" + tools_dialect.instructions
+        end
+
         if is_gpt_o?
-          { role: "user", content: msg[:content] }
+          { role: "user", content: content }
         else
-          { role: "system", content: msg[:content] }
+          { role: "system", content: content }
         end
       end
 
@ -63,11 +77,19 @@ module DiscourseAi
       end
 
       def tool_call_msg(msg)
-        tools_dialect.from_raw_tool_call(msg)
+        if disable_native_tools?
+          super
+        else
+          tools_dialect.from_raw_tool_call(msg)
+        end
       end
 
       def tool_msg(msg)
-        tools_dialect.from_raw_tool(msg)
+        if disable_native_tools?
+          super
+        else
+          tools_dialect.from_raw_tool(msg)
+        end
       end
 
       def user_msg(msg)

@ -168,7 +168,7 @@ module DiscourseAi
       raise NotImplemented
     end
 
-      def assistant_msg(msg)
+      def model_msg(msg)
       raise NotImplemented
     end
 

@ -177,11 +177,15 @@ module DiscourseAi
      end
 
      def tool_call_msg(msg)
-        { role: "assistant", content: tools_dialect.from_raw_tool_call(msg) }
+        new_content = tools_dialect.from_raw_tool_call(msg)
+        msg = msg.merge(content: new_content)
+        model_msg(msg)
      end
 
      def tool_msg(msg)
-        { role: "user", content: tools_dialect.from_raw_tool(msg) }
+        new_content = tools_dialect.from_raw_tool(msg)
+        msg = msg.merge(content: new_content)
+        user_msg(msg)
      end
    end
  end
end

@ -11,7 +11,7 @@ module DiscourseAi
      end
 
      def native_tool_support?
-        true
+        !llm_model.lookup_custom_param("disable_native_tools")
      end
 
      def translate

@ -84,10 +84,16 @@ module DiscourseAi
      end
 
      def system_msg(msg)
+        content = msg[:content]
+
+        if !native_tool_support? && tools_dialect.instructions.present?
+          content = content.to_s + "\n\n#{tools_dialect.instructions}"
+        end
+
        if beta_api?
-          { role: "system", content: msg[:content] }
+          { role: "system", content: content }
        else
-          { role: "user", parts: { text: msg[:content] } }
+          { role: "user", parts: { text: content } }
        end
      end
 

@ -125,35 +131,43 @@ module DiscourseAi
      end
 
      def tool_call_msg(msg)
-        call_details = JSON.parse(msg[:content], symbolize_names: true)
-        part = {
-          functionCall: {
-            name: msg[:name] || call_details[:name],
-            args: call_details[:arguments],
-          },
-        }
+        if native_tool_support?
+          call_details = JSON.parse(msg[:content], symbolize_names: true)
+          part = {
+            functionCall: {
+              name: msg[:name] || call_details[:name],
+              args: call_details[:arguments],
+            },
+          }
 
-        if beta_api?
-          { role: "model", parts: [part] }
+          if beta_api?
+            { role: "model", parts: [part] }
+          else
+            { role: "model", parts: part }
+          end
        else
-          { role: "model", parts: part }
+          super
        end
      end
 
      def tool_msg(msg)
-        part = {
-          functionResponse: {
-            name: msg[:name] || msg[:id],
-            response: {
-              content: msg[:content],
-            },
-          },
-        }
+        if native_tool_support?
+          part = {
+            functionResponse: {
+              name: msg[:name] || msg[:id],
+              response: {
+                content: msg[:content],
+              },
+            },
+          }
 
-        if beta_api?
-          { role: "function", parts: [part] }
+          if beta_api?
+            { role: "function", parts: [part] }
+          else
+            { role: "function", parts: part }
+          end
        else
-          { role: "function", parts: part }
+          super
        end
      end
    end
  end

@ -37,11 +37,19 @@ module DiscourseAi
      end
 
      def tool_call_msg(msg)
-        tools_dialect.from_raw_tool_call(msg)
+        if enable_native_tool?
+          tools_dialect.from_raw_tool_call(msg)
+        else
+          super
+        end
      end
 
      def tool_msg(msg)
-        tools_dialect.from_raw_tool(msg)
+        if enable_native_tool?
+          tools_dialect.from_raw_tool(msg)
+        else
+          super
+        end
      end
 
      def system_msg(msg)
@ -118,8 +118,9 @@ module DiscourseAi
|
||||||
If you wish to call multiple function in one reply, wrap multiple <invoke>
|
If you wish to call multiple function in one reply, wrap multiple <invoke>
|
||||||
block in a single <function_calls> block.
|
block in a single <function_calls> block.
|
||||||
|
|
||||||
Always prefer to lead with tool calls, if you need to execute any.
|
- Always prefer to lead with tool calls, if you need to execute any.
|
||||||
Avoid all niceties prior to tool calls, Eg: "Let me look this up for you.." etc.
|
- Avoid all niceties prior to tool calls, Eg: "Let me look this up for you.." etc.
|
||||||
|
- DO NOT encode HTML entities in tool calls. You may use <![CDATA[...]]> for encoding if required.
|
||||||
Here are the complete list of tools available:
|
Here are the complete list of tools available:
|
||||||
TEXT
|
TEXT
|
||||||
end
|
end
|
||||||
|
|
|
@ -32,7 +32,11 @@ module DiscourseAi
|
||||||
llm_model.name
|
llm_model.name
|
||||||
end
|
end
|
||||||
|
|
||||||
options = { model: mapped_model, max_tokens: 3_000 }
|
# Note: Anthropic requires this param
|
||||||
|
max_tokens = 4096
|
||||||
|
max_tokens = 8192 if mapped_model.match?(/3.5/)
|
||||||
|
|
||||||
|
options = { model: mapped_model, max_tokens: max_tokens }
|
||||||
|
|
||||||
options[:stop_sequences] = ["</function_calls>"] if !dialect.native_tool_support? &&
|
options[:stop_sequences] = ["</function_calls>"] if !dialect.native_tool_support? &&
|
||||||
dialect.prompt.has_tools?
|
dialect.prompt.has_tools?
|
||||||
|
|
|
@ -96,8 +96,10 @@ module DiscourseAi
|
||||||
raise CompletionFailed, response.body
|
raise CompletionFailed, response.body
|
||||||
end
|
end
|
||||||
|
|
||||||
xml_tool_processor = XmlToolProcessor.new if xml_tools_enabled? &&
|
xml_tool_processor =
|
||||||
dialect.prompt.has_tools?
|
XmlToolProcessor.new(
|
||||||
|
partial_tool_calls: partial_tool_calls,
|
||||||
|
) if xml_tools_enabled? && dialect.prompt.has_tools?
|
||||||
|
|
||||||
to_strip = xml_tags_to_strip(dialect)
|
to_strip = xml_tags_to_strip(dialect)
|
||||||
xml_stripper =
|
xml_stripper =
|
||||||
|
|
|
@ -58,7 +58,9 @@ module DiscourseAi
|
||||||
end
|
end
|
||||||
|
|
||||||
def prepare_payload(prompt, model_params, dialect)
|
def prepare_payload(prompt, model_params, dialect)
|
||||||
tools = dialect.tools
|
@native_tool_support = dialect.native_tool_support?
|
||||||
|
|
||||||
|
tools = dialect.tools if @native_tool_support
|
||||||
|
|
||||||
payload = default_options.merge(contents: prompt[:messages])
|
payload = default_options.merge(contents: prompt[:messages])
|
||||||
payload[:systemInstruction] = {
|
payload[:systemInstruction] = {
|
||||||
|
@ -144,6 +146,7 @@ module DiscourseAi
|
||||||
|
|
||||||
def decode(chunk)
|
def decode(chunk)
|
||||||
json = JSON.parse(chunk, symbolize_names: true)
|
json = JSON.parse(chunk, symbolize_names: true)
|
||||||
|
|
||||||
idx = -1
|
idx = -1
|
||||||
json
|
json
|
||||||
.dig(:candidates, 0, :content, :parts)
|
.dig(:candidates, 0, :content, :parts)
|
||||||
|
@ -168,30 +171,28 @@ module DiscourseAi
|
||||||
|
|
||||||
def decode_chunk(chunk)
|
def decode_chunk(chunk)
|
||||||
@tool_index ||= -1
|
@tool_index ||= -1
|
||||||
|
|
||||||
streaming_decoder
|
streaming_decoder
|
||||||
.decode(chunk)
|
.decode(chunk)
|
||||||
.map do |parsed|
|
.map do |parsed|
|
||||||
update_usage(parsed)
|
update_usage(parsed)
|
||||||
parsed
|
parts = parsed.dig(:candidates, 0, :content, :parts)
|
||||||
.dig(:candidates, 0, :content, :parts)
|
parts&.map do |part|
|
||||||
.map do |part|
|
if part[:text]
|
||||||
if part[:text]
|
part = part[:text]
|
||||||
part = part[:text]
|
if part != ""
|
||||||
if part != ""
|
part
|
||||||
part
|
else
|
||||||
else
|
nil
|
||||||
nil
|
|
||||||
end
|
|
||||||
elsif part[:functionCall]
|
|
||||||
@tool_index += 1
|
|
||||||
ToolCall.new(
|
|
||||||
id: "tool_#{@tool_index}",
|
|
||||||
name: part[:functionCall][:name],
|
|
||||||
parameters: part[:functionCall][:args],
|
|
||||||
)
|
|
||||||
end
|
end
|
||||||
|
elsif part[:functionCall]
|
||||||
|
@tool_index += 1
|
||||||
|
ToolCall.new(
|
||||||
|
id: "tool_#{@tool_index}",
|
||||||
|
name: part[:functionCall][:name],
|
||||||
|
parameters: part[:functionCall][:args],
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
.flatten
|
.flatten
|
||||||
.compact
|
.compact
|
||||||
|
@ -223,7 +224,7 @@ module DiscourseAi
|
||||||
end
|
end
|
||||||
|
|
||||||
def xml_tools_enabled?
|
def xml_tools_enabled?
|
||||||
false
|
!@native_tool_support
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -36,14 +36,8 @@ module DiscourseAi
           partial_tool_calls: false,
           &blk
         )
-          if dialect.respond_to?(:is_gpt_o?) && dialect.is_gpt_o? && block_given?
-            # we need to disable streaming and simulate it
-            blk.call "", lambda { |*| }
-            response = super(dialect, user, model_params, feature_name: feature_name, &nil)
-            blk.call response, lambda { |*| }
-          else
-            super
-          end
+          @disable_native_tools = dialect.disable_native_tools?
+          super
         end

         private
@@ -69,10 +63,17 @@ module DiscourseAi
             # We'll fallback to guess this using the tokenizer.
             payload[:stream_options] = { include_usage: true } if llm_model.provider == "open_ai"
           end
-          if dialect.tools.present?
-            payload[:tools] = dialect.tools
-            if dialect.tool_choice.present?
-              payload[:tool_choice] = { type: "function", function: { name: dialect.tool_choice } }
+          if !xml_tools_enabled?
+            if dialect.tools.present?
+              payload[:tools] = dialect.tools
+              if dialect.tool_choice.present?
+                payload[:tool_choice] = {
+                  type: "function",
+                  function: {
+                    name: dialect.tool_choice,
+                  },
+                }
+              end
             end
           end
           payload
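The net effect in `prepare_payload` is that `tools` and `tool_choice` are only sent to OpenAI when the model has not been switched to XML tool mode. A minimal standalone sketch of that branching, with invented names (`build_tool_payload`, `xml_tools_enabled`), not the plugin's actual method:

def build_tool_payload(tools:, tool_choice: nil, xml_tools_enabled: false)
  payload = {}
  # XML mode: tool definitions are injected into the prompt instead of the request body
  return payload if xml_tools_enabled

  if tools && !tools.empty?
    payload[:tools] = tools
    payload[:tool_choice] = { type: "function", function: { name: tool_choice } } if tool_choice
  end
  payload
end

build_tool_payload(tools: [{ name: "create_artifact" }], tool_choice: "create_artifact")
# => { tools: [...], tool_choice: { type: "function", function: { name: "create_artifact" } } }
build_tool_payload(tools: [{ name: "create_artifact" }], xml_tools_enabled: true)
# => {}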
@@ -121,7 +122,7 @@ module DiscourseAi
         end

         def xml_tools_enabled?
-          false
+          !!@disable_native_tools
         end

         private
@@ -31,9 +31,8 @@ module DiscourseAi
             tokens: 200_000,
             display_name: "Claude 3.5 Sonnet",
           },
+          { name: "claude-3-5-haiku", tokens: 200_000, display_name: "Claude 3.5 Haiku" },
           { name: "claude-3-opus", tokens: 200_000, display_name: "Claude 3 Opus" },
-          { name: "claude-3-sonnet", tokens: 200_000, display_name: "Claude 3 Sonnet" },
-          { name: "claude-3-haiku", tokens: 200_000, display_name: "Claude 3 Haiku" },
         ],
         tokenizer: DiscourseAi::Tokenizer::AnthropicTokenizer,
         endpoint: "https://api.anthropic.com/v1/messages",
@@ -63,6 +62,8 @@ module DiscourseAi
       {
         id: "open_ai",
         models: [
+          { name: "o1-preview", tokens: 131_072, display_name: "o1" },
+          { name: "o1-mini", tokens: 131_072, display_name: "o1 mini" },
           { name: "gpt-4o", tokens: 131_072, display_name: "GPT-4 Omni" },
           { name: "gpt-4o-mini", tokens: 131_072, display_name: "GPT-4 Omni Mini" },
           { name: "gpt-4-turbo", tokens: 131_072, display_name: "GPT-4 Turbo" },
@@ -71,6 +72,24 @@ module DiscourseAi
         endpoint: "https://api.openai.com/v1/chat/completions",
         provider: "open_ai",
       },
+      {
+        id: "samba_nova",
+        models: [
+          {
+            name: "Meta-Llama-3.1-8B-Instruct",
+            tokens: 16_384,
+            display_name: "Llama 3.1 8B",
+          },
+          {
+            name: "Meta-Llama-3.1-70B-Instruct",
+            tokens: 65_536,
+            display_name: "Llama 3.1 70B",
+          },
+        ],
+        tokenizer: DiscourseAi::Tokenizer::Llama3Tokenizer,
+        endpoint: "https://api.sambanova.ai/v1/chat/completions",
+        provider: "samba_nova",
+      },
     ]
   end
 end
@@ -6,6 +6,10 @@ module DiscourseAi
       attr_reader :id, :name, :parameters
       attr_accessor :partial

+      def partial?
+        !!@partial
+      end
+
       def initialize(id:, name:, parameters: nil)
         @id = id
         @name = name
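A hypothetical consumer sketch for the new `partial?` flag: a streaming block can branch on it to render progress for an in-flight tool call and only execute once the final, non-partial `ToolCall` arrives. `ToolCallish` and `handle` below are invented stand-ins, not plugin API:

ToolCallish =
  Struct.new(:id, :name, :parameters, :partial, keyword_init: true) do
    def partial?
      !!partial
    end
  end

def handle(item)
  if item.is_a?(String)
    print item                                               # streamed text goes straight to the post
  elsif item.partial?
    puts "partial #{item.name}: #{item.parameters.inspect}"  # update progress UI incrementally
  else
    puts "execute #{item.name}: #{item.parameters.inspect}"  # run the finished tool call
  end
end

handle("Generating an artifact...\n")
handle(ToolCallish.new(id: "tool_0", name: "create_artifact", parameters: { name: "Fiv" }, partial: true))
handle(ToolCallish.new(id: "tool_0", name: "create_artifact", parameters: { name: "Five Lines" }))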
@@ -16,7 +16,12 @@ module DiscourseAi
         @current_value = nil
       end

-      @parser.value { |v| tool_call.notify_progress(@current_key, v) if @current_key }
+      @parser.value do |v|
+        if @current_key
+          tool_call.notify_progress(@current_key, v)
+          @current_key = nil
+        end
+      end
     end

     def <<(json)
@@ -7,11 +7,13 @@
 module DiscourseAi
   module Completions
     class XmlToolProcessor
-      def initialize
+      def initialize(partial_tool_calls: false)
         @buffer = +""
         @function_buffer = +""
         @should_cancel = false
         @in_tool = false
+        @partial_tool_calls = partial_tool_calls
+        @partial_tools = [] if @partial_tool_calls
       end

       def <<(text)
@@ -28,10 +30,10 @@ module DiscourseAi
           if @in_tool
             @function_buffer = @buffer[index..-1]
             text_index = text.rindex("<function_calls>")
-            result << text[0..text_index - 1].strip if text_index && text_index > 0
+            result << text[0..text_index - 1].rstrip if text_index && text_index > 0
           end
         else
-          @function_buffer << text
+          add_to_function_buffer(text)
         end

         if !@in_tool
@@ -41,7 +43,7 @@ module DiscourseAi
             @function_buffer = text[split_index + 1..-1] || ""
             text = text[0..split_index] || ""
           else
-            @function_buffer << text
+            add_to_function_buffer(text)
             text = ""
           end
         else
@@ -56,37 +58,25 @@ module DiscourseAi
           @should_cancel = true if text.include?("</function_calls>")
         end

+        if @should_notify_partial_tool
+          @should_notify_partial_tool = false
+          result << @partial_tools.last
+        end
+
         result
       end

       def finish
         return [] if @function_buffer.blank?

-        xml = Nokogiri::HTML5.fragment(@function_buffer)
-        normalize_function_ids!(xml)
-        last_invoke = xml.at("invoke:last")
-        if last_invoke
-          last_invoke.next_sibling.remove while last_invoke.next_sibling
-          xml.at("invoke:last").add_next_sibling("\n") if !last_invoke.next_sibling
-        end
-
-        xml
-          .css("invoke")
-          .map do |invoke|
-            tool_name = invoke.at("tool_name").content.force_encoding("UTF-8")
-            tool_id = invoke.at("tool_id").content.force_encoding("UTF-8")
-            parameters = {}
-            invoke
-              .at("parameters")
-              &.children
-              &.each do |node|
-                next if node.text?
-                name = node.name
-                value = node.content.to_s
-                parameters[name.to_sym] = value.to_s.force_encoding("UTF-8")
-              end
-            ToolCall.new(id: tool_id, name: tool_name, parameters: parameters)
-          end
+        idx = -1
+        parse_malformed_xml(@function_buffer).map do |tool|
+          ToolCall.new(
+            id: "tool_#{idx += 1}",
+            name: tool[:tool_name],
+            parameters: tool[:parameters],
+          )
+        end
       end

       def should_cancel?
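`finish` now delegates all parsing to the regex-based `parse_malformed_xml`, so slightly malformed XML from the model still yields tool calls, and ids are simply generated as `tool_0`, `tool_1`, and so on. A sketch of driving the processor by hand, assuming the plugin is loaded (for example in a Rails console); the `create_artifact` parameter names mirror the streaming fixture later in this diff:

processor = DiscourseAi::Completions::XmlToolProcessor.new

# feed the model output in as it streams
processor << "Sure, building that now.\n<function_calls>\n<invoke>\n"
processor << "<tool_name>create_artifact</tool_name>\n<parameters>\n"
processor << "<name>Hello page</name>\n<html_body><![CDATA[<h1>Hello</h1>]]></html_body>\n"
processor << "</parameters>\n</invoke>\n</function_calls>"

processor.should_cancel? # => true, the closing tag was seen so generation can stop
tool = processor.finish.first
tool.name                # => "create_artifact"
tool.parameters          # => roughly { name: "Hello page", html_body: "<h1>Hello</h1>" }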
@@ -95,6 +85,105 @@ module DiscourseAi
       private

+      def add_to_function_buffer(text)
+        @function_buffer << text
+        detect_partial_tool_calls(@function_buffer, text) if @partial_tool_calls
+      end
+
+      def detect_partial_tool_calls(buffer, delta)
+        parse_partial_tool_call(buffer)
+      end
+
+      def parse_partial_tool_call(buffer)
+        match =
+          buffer
+            .scan(
+              %r{
+                <invoke>
+                \s*
+                <tool_name>
+                ([^<]+)
+                </tool_name>
+                \s*
+                <parameters>
+                (.*?)
+                (</parameters>|\Z)
+              }mx,
+            )
+            .to_a
+            .last
+
+        if match
+          params = partial_parse_params(match[1])
+          if params.present?
+            current_tool = @partial_tools.last
+            if !current_tool || current_tool.name != match[0].strip
+              current_tool =
+                ToolCall.new(
+                  id: "tool_#{@partial_tools.length}",
+                  name: match[0].strip,
+                  parameters: params,
+                )
+              @partial_tools << current_tool
+              current_tool.partial = true
+              @should_notify_partial_tool = true
+            end
+
+            if current_tool.parameters != params
+              current_tool.parameters = params
+              @should_notify_partial_tool = true
+            end
+          end
+        end
+      end
+
+      def partial_parse_params(params)
+        params
+          .scan(%r{
+            <([^>]+)>
+            (.*?)
+            (</\1>|\Z)
+          }mx)
+          .each_with_object({}) do |(name, value), hash|
+            next if "<![CDATA[".start_with?(value)
+            hash[name.to_sym] = value.gsub(/^<!\[CDATA\[|\]\]>$/, "")
+          end
+      end
+
+      def parse_malformed_xml(input)
+        input
+          .scan(
+            %r{
+              <invoke>
+              \s*
+              <tool_name>
+              ([^<]+)
+              </tool_name>
+              \s*
+              <parameters>
+              (.*?)
+              </parameters>
+              \s*
+              </invoke>
+            }mx,
+          )
+          .map do |tool_name, params|
+            {
+              tool_name: tool_name.strip,
+              parameters:
+                params
+                  .scan(%r{
+                    <([^>]+)>
+                    (.*?)
+                    </\1>
+                  }mx)
+                  .each_with_object({}) do |(name, value), hash|
+                    hash[name.to_sym] = value.gsub(/^<!\[CDATA\[|\]\]>$/, "")
+                  end,
+            }
+          end
+      end
+
       def normalize_function_ids!(function_buffer)
         function_buffer
           .css("invoke")
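The new private helpers also enable the incremental path: with `partial_tool_calls: true`, `<<` can additionally yield an in-flight `ToolCall` whose parameters grow as more of the `<parameters>` block streams in. Another hand-driven sketch under the same assumption that the plugin is loaded; exact return values depend on where the chunk boundaries fall:

processor = DiscourseAi::Completions::XmlToolProcessor.new(partial_tool_calls: true)

processor << "<function_calls>\n<invoke>\n<tool_name>create_artifact</tool_name>\n<parameters>\n"

out = processor << "<name>Hello page</name>\n"
partial = out.last
partial.partial?   # => true
partial.parameters # => { name: "Hello page" } so far

processor << "<html_body><![CDATA[<h1>Hello</h1>]]></html_body>\n</parameters>\n</invoke>\n</function_calls>"
processor.finish.first.partial? # => false, finish returns the completed call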
@@ -39,6 +39,8 @@ register_asset "stylesheets/modules/llms/common/ai-llms-editor.scss"

 register_asset "stylesheets/modules/ai-bot/common/ai-tools.scss"

+register_asset "stylesheets/modules/ai-bot/common/ai-artifact.scss"
+
 module ::DiscourseAi
   PLUGIN_NAME = "discourse-ai"
 end
@@ -48,6 +50,10 @@ Rails.autoloaders.main.push_dir(File.join(__dir__, "lib"), namespace: ::Discours
 require_relative "lib/engine"

 after_initialize do
+  if defined?(Rack::MiniProfiler)
+    Rack::MiniProfiler.config.skip_paths << "/discourse-ai/ai-bot/artifacts"
+  end
+
   # do not autoload this cause we may have no namespace
   require_relative "discourse_automation/llm_triage"
   require_relative "discourse_automation/llm_report"
@@ -0,0 +1,299 @@
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_yH3ixdEz4wvSuK8ei3gNYwk3","type":"function","function":{"name":"create_artifact","arguments":""}}],"refusal":null},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"name"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Five"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Lines"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"HTML"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"CSS"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"JS"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Hello"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"World"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"html"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"_body"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"<h"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">Hello"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"</"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"h"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"<p"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"World"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"!</"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"p"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"<button"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" onclick"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"=\\\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"show"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"()"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\\">"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"G"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"reet"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"</"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"button"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"<div"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" id"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"=\\\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\\"></"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"div"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"<hr"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":">"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"css"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"body"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" margin"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":":"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"0"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":";"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" }\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"nh"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" color"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":":"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" blue"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":";"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" }\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"np"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" font"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"-size"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":":"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"20"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"px"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":";"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" }\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"button"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" padding"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":":"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"10"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"px"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":";"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" }\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"hr"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" border"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":":"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"1"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"px"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" solid"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" #"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"ccc"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":";"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" }"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\",\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"js"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\":\""}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"function"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" show"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"()"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" {\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" var"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Div"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" ="}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" document"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":".get"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Element"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"By"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Id"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"('"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"');"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" "}}]},"logprobs":null,"finish_reason":null}],"usage":null}
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" message"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Div"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":".text"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Content"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" ="}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" '"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"Hello"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":","}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":" World"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"!"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"';"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"}\\"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"n"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"}"}}]},"logprobs":null,"finish_reason":null}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
|
||||||
|
|
||||||
|
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[],"usage":{"prompt_tokens":735,"completion_tokens":156,"total_tokens":891,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
|
||||||
|
|
||||||
|
data: [DONE]
|
||||||
|
|
||||||
|
|
|
@@ -186,7 +186,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do

       expected_body = {
         model: "claude-3-opus-20240229",
-        max_tokens: 3000,
+        max_tokens: 4096,
         messages: [{ role: "user", content: "user1: hello" }],
         system: "You are hello bot",
         stream: true,
@@ -278,7 +278,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do

       request_body = {
         model: "claude-3-opus-20240229",
-        max_tokens: 3000,
+        max_tokens: 4096,
         messages: [
           {
             role: "user",
@@ -376,7 +376,7 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Anthropic do

       expected_body = {
         model: "claude-3-opus-20240229",
-        max_tokens: 3000,
+        max_tokens: 4096,
         messages: [{ role: "user", content: "user1: hello" }],
         system: "You are hello bot",
       }
@@ -324,6 +324,37 @@ RSpec.describe DiscourseAi::Completions::Endpoints::Gemini do
     expect(log.response_tokens).to eq(4)
   end

+  it "Can correctly handle malformed responses" do
+    response = <<~TEXT
+      data: {"candidates": [{"content": {"parts": [{"text": "Certainly"}],"role": "model"}}],"usageMetadata": {"promptTokenCount": 399,"totalTokenCount": 399},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": "! I'll create a simple \\"Hello, World!\\" page where each letter"}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 399,"totalTokenCount": 399},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": " has a different color using inline styles for simplicity. Each letter will be wrapped"}],"role": "model"},"safetyRatings": [{"category": "HARM_CATEGORY_HATE_SPEECH","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_DANGEROUS_CONTENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_HARASSMENT","probability": "NEGLIGIBLE"},{"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT","probability": "NEGLIGIBLE"}]}],"usageMetadata": {"promptTokenCount": 399,"totalTokenCount": 399},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"content": {"parts": [{"text": ""}],"role": "model"},"finishReason": "STOP"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
+
+      data: {"candidates": [{"finishReason": "MALFORMED_FUNCTION_CALL"}],"usageMetadata": {"promptTokenCount": 399,"candidatesTokenCount": 191,"totalTokenCount": 590},"modelVersion": "gemini-1.5-pro-002"}
+
+    TEXT
+
+    llm = DiscourseAi::Completions::Llm.proxy("custom:#{model.id}")
+    url = "#{model.url}:streamGenerateContent?alt=sse&key=123"
+
+    output = []
+
+    stub_request(:post, url).to_return(status: 200, body: response)
+    llm.generate("Hello", user: user) { |partial| output << partial }
+
+    expect(output).to eq(
+      [
+        "Certainly",
+        "! I'll create a simple \"Hello, World!\" page where each letter",
+        " has a different color using inline styles for simplicity. Each letter will be wrapped",
+      ],
+    )
+  end
+
   it "Can correctly handle streamed responses even if they are chunked badly" do
     data = +""
     data << "da|ta: |"
@@ -53,11 +53,10 @@ class OpenAiMock < EndpointMock
     }.to_json
   end

-  def stub_raw(chunks)
-    WebMock.stub_request(:post, "https://api.openai.com/v1/chat/completions").to_return(
-      status: 200,
-      body: chunks,
-    )
+  def stub_raw(chunks, body_blk: nil)
+    stub = WebMock.stub_request(:post, "https://api.openai.com/v1/chat/completions")
+    stub.with(body: body_blk) if body_blk
+    stub.to_return(status: 200, body: chunks)
   end

   def stub_streamed_response(prompt, deltas, tool_call: false)
@@ -391,6 +390,59 @@ RSpec.describe DiscourseAi::Completions::Endpoints::OpenAi do
   end

   describe "#perform_completion!" do
+    context "when using XML tool calls format" do
+      let(:xml_tool_call_response) { <<~XML }
+        <function_calls>
+        <invoke>
+        <tool_name>get_weather</tool_name>
+        <parameters>
+        <location>Sydney</location>
+        <unit>c</unit>
+        </parameters>
+        </invoke>
+        </function_calls>
+      XML
+
+      it "parses XML tool calls" do
+        response = {
+          id: "chatcmpl-6sZfAb30Rnv9Q7ufzFwvQsMpjZh8S",
+          object: "chat.completion",
+          created: 1_678_464_820,
+          model: "gpt-3.5-turbo-0301",
+          usage: {
+            prompt_tokens: 8,
+            completion_tokens: 13,
+            total_tokens: 499,
+          },
+          choices: [
+            {
+              message: {
+                role: "assistant",
+                content: xml_tool_call_response,
+              },
+              finish_reason: "stop",
+              index: 0,
+            },
+          ],
+        }.to_json
+
+        endpoint.llm_model.update!(provider_params: { disable_native_tools: true })
+        body = nil
+        open_ai_mock.stub_raw(response, body_blk: proc { |inner_body| body = inner_body })
+
+        dialect = compliance.dialect(prompt: compliance.generic_prompt(tools: tools))
+        tool_call = endpoint.perform_completion!(dialect, user)
+
+        body_parsed = JSON.parse(body, symbolize_names: true)
+        expect(body_parsed[:tools]).to eq(nil)
+
+        expect(body_parsed[:messages][0][:content]).to include("<function_calls>")
+
+        expect(tool_call.name).to eq("get_weather")
+        expect(tool_call.parameters).to eq({ location: "Sydney", unit: "c" })
+      end
+    end
+
     context "when using regular mode" do
       context "with simple prompts" do
         it "completes a trivial prompt and logs the response" do
@@ -571,6 +623,42 @@ TEXT
     end
   end

+  it "properly handles multiple params in partial tool calls" do
+    # this is not working and it is driving me nuts so I will use a sledghammer
+    # text = plugin_file_from_fixtures("openai_artifact_call.txt", "bot")
+
+    path = File.join(__dir__, "../../../fixtures/bot", "openai_artifact_call.txt")
+    text = File.read(path)
+
+    partials = []
+    open_ai_mock.with_chunk_array_support do
+      open_ai_mock.stub_raw(text.scan(/.*\n/))
+
+      dialect = compliance.dialect(prompt: compliance.generic_prompt(tools: tools))
+      endpoint.perform_completion!(dialect, user, partial_tool_calls: true) do |partial|
+        partials << partial.dup
+      end
+    end
+
+    expect(partials.compact.length).to eq(128)
+
+    params =
+      partials
+        .map { |p| p.parameters if p.is_a?(DiscourseAi::Completions::ToolCall) && p.partial? }
+        .compact
+
+    lengths = {}
+    params.each do |p|
+      p.each do |k, v|
+        if lengths[k] && lengths[k] > v.length
+          expect(lengths[k]).to be > v.length
+        else
+          lengths[k] = v.length
+        end
+      end
+    end
+  end
+
   it "properly handles spaces in tools payload and partial tool calls" do
     raw_data = <<~TEXT.strip
       data: {"choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"func_id","type":"function","function":{"name":"go|ogle","arg|uments":""}}]}}]}
@@ -12,12 +12,84 @@ RSpec.describe DiscourseAi::Completions::XmlToolProcessor do
     expect(processor.should_cancel?).to eq(false)
   end

-  it "is usable for simple single message mode" do
+  it "can handle partial tool calls" do
+    processor = DiscourseAi::Completions::XmlToolProcessor.new(partial_tool_calls: true)
+
+    xml = (<<~XML).strip
+      <function|_calls>
+      <invoke>
+      <tool_name>h|ell|o<|/tool_name>
+      <parameters>
+      <hello>wo|r|ld</hello>
+      </parameters>
+      </invoke>
+      <invoke>
+      <tool_name>tool|2</tool_name>
+      <parameters>
+      <param>v|alue</param>
+      <para|m2><![CDA|TA[va|lue2]]></param2>
+      </parame|ters>
+      </invoke>
+    XML
+
+    result = []
+
+    xml.split("|").each { |part| result << (processor << part).map(&:dup) }
+
+    result << (processor.finish)
+    result.flatten!
+
+    tool1_params =
+      result
+        .select do |r|
+          r.is_a?(DiscourseAi::Completions::ToolCall) && r.name == "hello" && r.partial
+        end
+        .map(&:parameters)
+
+    expect(tool1_params).to eq([{ hello: "wo" }, { hello: "wor" }, { hello: "world" }])
+
+    tool2_params =
+      result
+        .select do |r|
+          r.is_a?(DiscourseAi::Completions::ToolCall) && r.name == "tool2" && r.partial
+        end
+        .map(&:parameters)
+
+    expect(tool2_params).to eq(
+      [
+        { param: "v" },
+        { param: "value" },
+        { param: "value", param2: "va" },
+        { param: "value", param2: "value2" },
+      ],
+    )
+  end
+
+  it "can handle mix and match xml cause tool llms may not encode" do
     xml = (<<~XML).strip
-      hello
       <function_calls>
       <invoke>
       <tool_name>hello</tool_name>
+      <parameters>
+      <hello>world <sam>sam</sam></hello>
+      <test><![CDATA[</h1>\n</div>\n]]></test>
+      </parameters>
+      </invoke>
+    XML
+
+    result = []
+    result << (processor << xml)
+    result << (processor.finish)
+
+    tool_call = result.last.first
+    expect(tool_call.parameters).to eq(hello: "world <sam>sam</sam>", test: "</h1>\n</div>\n")
+  end
+
+  it "is usable for simple single message mode" do
+    xml = (<<~XML)
+      world <function_calls>
+      <invoke>
+      <tool_name>hello</tool_name>
       <parameters>
       <hello>world</hello>
       <test>value</test>
@@ -26,6 +98,7 @@ RSpec.describe DiscourseAi::Completions::XmlToolProcessor do
     XML

     result = []
+    result << (processor << "hello")
     result << (processor << xml)
     result << (processor.finish)

@@ -38,7 +111,7 @@ RSpec.describe DiscourseAi::Completions::XmlToolProcessor do
         test: "value",
       },
     )
-    expect(result).to eq([["hello"], [tool_call]])
+    expect(result).to eq([["hello"], [" world"], [tool_call]])
     expect(processor.should_cancel?).to eq(false)
   end

@@ -149,8 +222,7 @@ RSpec.describe DiscourseAi::Completions::XmlToolProcessor do
     result << (processor.finish)

     # Should just do its best to parse the XML
-    tool_call =
-      DiscourseAi::Completions::ToolCall.new(id: "tool_0", name: "test", parameters: { param: "" })
+    tool_call = DiscourseAi::Completions::ToolCall.new(id: "tool_0", name: "test", parameters: {})
     expect(result).to eq([["text"], [tool_call]])
   end

@@ -46,6 +46,7 @@ RSpec.describe DiscourseAi::AiBot::Personas::Persona do
     }
   end

+  fab!(:admin)
   fab!(:user)
   fab!(:upload)

@@ -96,29 +97,6 @@ RSpec.describe DiscourseAi::AiBot::Personas::Persona do
   end

   it "enforces enums" do
-    xml = <<~XML
-      <function_calls>
-      <invoke>
-      <tool_name>search</tool_name>
-      <tool_id>call_JtYQMful5QKqw97XFsHzPweB</tool_id>
-      <parameters>
-      <max_posts>"3.2"</max_posts>
-      <status>cow</status>
-      <foo>bar</foo>
-      </parameters>
-      </invoke>
-      <invoke>
-      <tool_name>search</tool_name>
-      <tool_id>call_JtYQMful5QKqw97XFsHzPweB</tool_id>
-      <parameters>
-      <max_posts>"3.2"</max_posts>
-      <status>open</status>
-      <foo>bar</foo>
-      </parameters>
-      </invoke>
-      </function_calls>
-    XML
-
     tool_call =
       DiscourseAi::Completions::ToolCall.new(
         name: "search",
@@ -273,11 +251,27 @@ RSpec.describe DiscourseAi::AiBot::Personas::Persona do
       ],
     )

+    # it should allow staff access to WebArtifactCreator
+    expect(DiscourseAi::AiBot::Personas::Persona.all(user: admin)).to eq(
+      [
+        DiscourseAi::AiBot::Personas::General,
+        DiscourseAi::AiBot::Personas::Artist,
+        DiscourseAi::AiBot::Personas::Creative,
+        DiscourseAi::AiBot::Personas::DiscourseHelper,
+        DiscourseAi::AiBot::Personas::GithubHelper,
+        DiscourseAi::AiBot::Personas::Researcher,
+        DiscourseAi::AiBot::Personas::SettingsExplorer,
+        DiscourseAi::AiBot::Personas::SqlHelper,
+        DiscourseAi::AiBot::Personas::WebArtifactCreator,
+      ],
+    )
+
     # omits personas if key is missing
     SiteSetting.ai_stability_api_key = ""
     SiteSetting.ai_google_custom_search_api_key = ""
+    SiteSetting.ai_artifact_security = "disabled"

-    expect(DiscourseAi::AiBot::Personas::Persona.all(user: user)).to contain_exactly(
+    expect(DiscourseAi::AiBot::Personas::Persona.all(user: admin)).to contain_exactly(
       DiscourseAi::AiBot::Personas::General,
       DiscourseAi::AiBot::Personas::SqlHelper,
       DiscourseAi::AiBot::Personas::SettingsExplorer,
@@ -791,11 +791,12 @@ RSpec.describe DiscourseAi::AiBot::Playground do
       expect(done_signal.data[:cooked]).to eq(reply.cooked)

       expect(messages.first.data[:raw]).to eq("")
-      messages[1..-1].each_with_index do |m, idx|
-        expect(m.data[:raw]).to eq(expected_bot_response[0..idx])
-      end

       expect(reply.cooked).to eq(PrettyText.cook(expected_bot_response))
+
+      messages[1..-1].each do |m|
+        expect(expected_bot_response.start_with?(m.data[:raw])).to eq(true)
+      end
     end
   end

@@ -0,0 +1,34 @@
#frozen_string_literal: true

RSpec.describe DiscourseAi::AiBot::Tools::CreateArtifact do
  fab!(:llm_model)
  let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model(llm_model.name) }
  let(:llm) { DiscourseAi::Completions::Llm.proxy("custom:#{llm_model.id}") }

  before { SiteSetting.ai_bot_enabled = true }

  describe "#process" do
    it "can correctly handle partial updates" do
      tool = described_class.new({}, bot_user: bot_user, llm: llm)

      tool.parameters = { css: "a { }" }
      tool.partial_invoke

      expect(tool.custom_raw).to eq("### CSS\n\n```css\na { }\n```")

      tool.parameters = { css: "a { }", html_body: "hello" }
      tool.partial_invoke

      expect(tool.custom_raw).to eq(
        "### CSS\n\n```css\na { }\n```\n\n### HTML\n\n```html\nhello\n```",
      )

      tool.parameters = { css: "a { }", html_body: "hello world" }
      tool.partial_invoke

      expect(tool.custom_raw).to eq(
        "### CSS\n\n```css\na { }\n```\n\n### HTML\n\n```html\nhello world\n```",
      )
    end
  end
end
@@ -86,6 +86,77 @@ RSpec.describe DiscourseAi::AiBot::SharedAiConversationsController do
       expect(response).to have_http_status(:success)
     end

+    context "when ai artifacts are in lax mode" do
+      before { SiteSetting.ai_artifact_security = "lax" }
+
+      it "properly shares artifacts" do
+        first_post = user_pm_share.posts.first
+
+        artifact_not_allowed =
+          AiArtifact.create!(
+            user: bot_user,
+            post: Fabricate(:private_message_post),
+            name: "test",
+            html: "<div>test</div>",
+          )
+
+        artifact =
+          AiArtifact.create!(
+            user: bot_user,
+            post: first_post,
+            name: "test",
+            html: "<div>test</div>",
+          )
+
+        # lets log out and see we can not access the artifacts
+        delete "/session/#{user.id}"
+
+        get artifact.url
+        expect(response).to have_http_status(:not_found)
+
+        get artifact_not_allowed.url
+        expect(response).to have_http_status(:not_found)
+
+        sign_in(user)
+
+        first_post.update!(raw: <<~RAW)
+          This is a post with an artifact
+
+          <div class="ai-artifact" data-ai-artifact-id="#{artifact.id}"></div>
+          <div class="ai-artifact" data-ai-artifact-id="#{artifact_not_allowed.id}"></div>
+        RAW
+
+        post "#{path}.json", params: { topic_id: user_pm_share.id }
+        expect(response).to have_http_status(:success)
+
+        key = response.parsed_body["share_key"]
+
+        get "#{path}/#{key}"
+        expect(response).to have_http_status(:success)
+
+        expect(response.body).to include(artifact.url)
+        expect(response.body).to include(artifact_not_allowed.url)
+
+        # lets log out and see we can not access the artifacts
+        delete "/session/#{user.id}"
+
+        get artifact.url
+        expect(response).to have_http_status(:success)
+
+        get artifact_not_allowed.url
+        expect(response).to have_http_status(:not_found)
+
+        sign_in(user)
+        delete "#{path}/#{key}.json"
+        expect(response).to have_http_status(:success)
+
+        # we can not longer see it...
+        delete "/session/#{user.id}"
+        get artifact.url
+        expect(response).to have_http_status(:not_found)
+      end
+    end
+
     context "when secure uploads are enabled" do
       let(:upload_1) { Fabricate(:s3_image_upload, user: bot_user, secure: true) }
       let(:upload_2) { Fabricate(:s3_image_upload, user: bot_user, secure: true) }
@@ -11,7 +11,7 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do
   it "correctly sets defaults" do
     visit "/admin/plugins/discourse-ai/ai-llms"

-    find("[data-llm-id='anthropic-claude-3-haiku'] button").click()
+    find("[data-llm-id='anthropic-claude-3-5-haiku'] button").click()
     find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
     find(".ai-llm-editor__enabled-chat-bot input").click
     find(".ai-llm-editor__save").click()
@@ -23,9 +23,9 @@ RSpec.describe "Managing LLM configurations", type: :system, js: true do

     preset = DiscourseAi::Completions::Llm.presets.find { |p| p[:id] == "anthropic" }

-    model_preset = preset[:models].find { |m| m[:name] == "claude-3-haiku" }
+    model_preset = preset[:models].find { |m| m[:name] == "claude-3-5-haiku" }

-    expect(llm.name).to eq("claude-3-haiku")
+    expect(llm.name).to eq("claude-3-5-haiku")
     expect(llm.url).to eq(preset[:endpoint])
     expect(llm.tokenizer).to eq(preset[:tokenizer].to_s)
     expect(llm.max_prompt_tokens.to_i).to eq(model_preset[:tokens])