DEV: Remove the summarization feature (#83)
* DEV: Remove the summarization feature

  Instead, we'll register summarization implementations for OpenAI, Anthropic, and Discourse AI using the API defined in discourse/discourse#21813. Core and chat will implement features on top of these implementations instead of this plugin extending them.

* Register instances that contain the model, requiring fewer site settings
This commit is contained in:
parent 081231a6eb
commit 3364fec425
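For orientation before the diff: the plugin-owned controller, modal, and initializers below are removed, and the plugin instead registers strategy objects that core renders. The sketch that follows is not part of the commit; it is a hypothetical extra backend written to the same shape as the OpenAI, Anthropic, and Discourse AI strategies added here. Method names are taken from this diff, while the actual `::Summarization::Base` contract lives in core (discourse/discourse#21813), so details may differ. `MyBackend`, `MyBackendClient`, `"my-model"`, and `ai_my_backend_api_key` are placeholders, not real code or settings.

# frozen_string_literal: true

# Hypothetical sketch only -- names below are placeholders, not part of this commit.
module DiscourseAi
  module Summarization
    module Strategies
      class MyBackend < ::Summarization::Base
        def display_name
          # `model` is assumed to be stored by the base class from the value
          # passed to `.new`, as the strategies in this diff rely on.
          "MyBackend's #{model}"
        end

        def correctly_configured?
          # Placeholder site setting; each strategy checks its own credentials.
          SiteSetting.ai_my_backend_api_key.present?
        end

        def configuration_hint
          I18n.t(
            "discourse_ai.summarization.configuration_hint",
            count: 1,
            setting: "ai_my_backend_api_key",
          )
        end

        def summarize(content_text)
          # Call whichever inference client the backend needs (placeholder here)
          # and return the summary as plain text.
          MyBackendClient.complete("Summarize:\n\n#{content_text}")
        end
      end
    end
  end
end

# Registered as a ready-made instance with the model baked in, mirroring
# EntryPoint#inject_into from this commit:
#   plugin.register_summarization_strategy(
#     DiscourseAi::Summarization::Strategies::MyBackend.new("my-model"),
#   )

Registering instances rather than classes is what lets the model name be baked into each strategy, which is why the per-module ai_summarization_model site setting can be dropped in this commit.
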
@@ -1,48 +0,0 @@
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    class SummaryController < ::ApplicationController
      requires_plugin ::DiscourseAi::PLUGIN_NAME
      requires_login

      VALID_SINCE_VALUES = [1, 3, 6, 12, 24, 72, 168]
      VALID_TARGETS = %w[chat_channel topic]

      def show
        raise PluginDisabled unless SiteSetting.ai_summarization_enabled
        target_type = params[:target_type]

        raise Discourse::InvalidParameters.new(:target_type) if !VALID_TARGETS.include?(target_type)

        since = nil

        if target_type == "chat_channel"
          since = params[:since].to_i
          raise Discourse::InvalidParameters.new(:since) if !VALID_SINCE_VALUES.include?(since)
          target = Chat::Channel.find_by(id: params[:target_id])
          raise Discourse::NotFound.new(:chat_channel) if !target
          raise Discourse::InvalidAccess if !guardian.can_join_chat_channel?(target)
        else
          target = Topic.find_by(id: params[:target_id])
          raise Discourse::NotFound.new(:topic) if !target
          raise Discourse::InvalidAccess if !guardian.can_see_topic?(target)
        end

        RateLimiter.new(
          current_user,
          "ai_summarization",
          6,
          SiteSetting.ai_summarization_rate_limit_minutes.minutes,
        ).performed!

        hijack do
          summary =
            DiscourseAi::Summarization::SummaryGenerator.new(target, current_user).summarize!(since)

          render json: { summary: summary }, status: 200
        end
      end
    end
  end
end
@@ -1,32 +0,0 @@
<DModalBody @title="discourse_ai.summarization.title">
  {{#if @allowTimeframe}}
    <span>{{i18n "discourse_ai.summarization.description"}}</span>
    <ComboBox
      @value={{this.sinceHours}}
      @content={{this.sinceOptions}}
      @onChange={{action this.summarize}}
      @valueProperty="value"
      @class="summarization-since"
    />
  {{/if}}

  <div class="channel-summary">
    <ConditionalLoadingSpinner @condition={{this.loading}} />

    {{#unless this.loading}}
      <Textarea @value={{this.summary}} disabled="true" class="summary-area" />
    {{/unless}}
  </div>

</DModalBody>

<div class="modal-footer">
  {{#unless @allowTimeframe}}
    <DButton
      @class="btn-primary create"
      @action={{this.summarize}}
      @label="discourse_ai.summarization.summarize"
    />
  {{/unless}}
  <DModalCancel @close={{route-action "closeModal"}} />
</div>
@@ -1,83 +0,0 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { action } from "@ember/object";
import { ajax } from "discourse/lib/ajax";
import { popupAjaxError } from "discourse/lib/ajax-error";
import I18n from "I18n";

export default class AiSummary extends Component {
  @tracked sinceHours = null;
  @tracked loading = false;
  @tracked availableSummaries = {};
  @tracked summary = null;
  sinceOptions = [
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 1 }),
      value: 1,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 3 }),
      value: 3,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 6 }),
      value: 6,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 12 }),
      value: 12,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 24 }),
      value: 24,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 72 }),
      value: 72,
    },
    {
      name: I18n.t("discourse_ai.summarization.since", { count: 168 }),
      value: 168,
    },
  ];

  get canSummarize() {
    return (!this.args.allowTimeframe || this.sinceHours) && !this.loading;
  }

  @action
  summarize(value) {
    this.loading = true;
    const attrs = {
      target_id: this.args.targetId,
      target_type: this.args.targetType,
    };

    if (this.args.allowTimeframe) {
      this.sinceHours = value;

      if (this.availableSummaries[this.sinceHours]) {
        this.summary = this.availableSummaries[this.sinceHours];
        this.loading = false;
        return;
      } else {
        attrs.since = this.sinceHours;
      }
    }

    ajax("/discourse-ai/summarization/summary", {
      method: "POST",
      data: attrs,
    })
      .then((data) => {
        if (this.args.allowTimeframe) {
          this.availableSummaries[this.sinceHours] = data.summary;
          this.summary = this.availableSummaries[this.sinceHours];
        } else {
          this.summary = data.summary;
        }
      })
      .catch(popupAjaxError)
      .finally(() => (this.loading = false));
  }
}
@@ -1,6 +0,0 @@
<AiSummary
  @targetId={{this.targetId}}
  @targetType={{this.targetType}}
  @allowTimeframe={{this.allowTimeframe}}
  @closeModal={{route-action "closeModal"}}
/>
@@ -1,43 +0,0 @@
import { withPluginApi } from "discourse/lib/plugin-api";
import showModal from "discourse/lib/show-modal";
import { action } from "@ember/object";

function initializeChatChannelSummary(api) {
  const chat = api.container.lookup("service:chat");
  if (chat) {
    api.registerChatComposerButton?.({
      translatedLabel: "discourse_ai.summarization.title",
      id: "chat_channel_summary",
      icon: "discourse-sparkles",
      action: "showChannelSummary",
      position: "dropdown",
    });

    api.modifyClass("component:chat-composer", {
      pluginId: "discourse-ai",

      @action
      showChannelSummary() {
        showModal("ai-summary").setProperties({
          targetId: this.args.channel.id,
          targetType: "chat_channel",
          allowTimeframe: true,
        });
      },
    });
  }
}

export default {
  name: "discourse-ai-chat-channel-summary",

  initialize(container) {
    const settings = container.lookup("service:site-settings");
    const summarizationEnabled =
      settings.discourse_ai_enabled && settings.ai_summarization_enabled;

    if (summarizationEnabled) {
      withPluginApi("1.6.0", initializeChatChannelSummary);
    }
  },
};
@@ -1,48 +0,0 @@
import { withPluginApi } from "discourse/lib/plugin-api";
import showModal from "discourse/lib/show-modal";

function initializeTopicSummary(api) {
  api.modifyClass("component:scrolling-post-stream", {
    pluginId: "discourse-ai",

    showAiSummary() {
      showModal("ai-summary").setProperties({
        targetId: this.posts["posts"][0].topic_id,
        targetType: "topic",
        allowTimeframe: false,
      });
    },
  });

  api.addTopicSummaryCallback((html, attrs, widget) => {
    html.push(
      widget.attach("button", {
        className: "btn btn-primary topic-ai-summarization",
        icon: "magic",
        title: "discourse_ai.summarization.title",
        label: "discourse_ai.summarization.title",
        action: "showAiSummary",
      })
    );

    return html;
  });
}

export default {
  name: "discourse-ai-topic-summary",

  initialize(container) {
    const user = container.lookup("service:current-user");

    if (user) {
      const settings = container.lookup("service:site-settings");
      const summarizationEnabled =
        settings.discourse_ai_enabled && settings.ai_summarization_enabled;

      if (summarizationEnabled) {
        withPluginApi("1.6.0", initializeTopicSummary);
      }
    }
  },
};
@@ -1,10 +0,0 @@
.ai-summary-modal {
  .summarization-since,
  .summary-area {
    margin: 10px 0 10px 0;
  }

  .summary-area {
    min-height: 200px;
  }
}
@@ -1,3 +0,0 @@
.topic-ai-summarization {
  margin-left: 10px;
}
@@ -1,9 +0,0 @@
.toggle-summary {
  button {
    width: 100%;
  }

  .topic-ai-summarization {
    margin-top: 10px;
  }
}
@@ -17,14 +17,6 @@ en:
embeddings:
  semantic_search: "Topics (Semantic)"

summarization:
  title: "Summarize using AI"
  description: "Select an option below to summarize the conversation sent during the desired timeframe."
  summarize: "Summarize"
  since:
    one: "Last hour"
    other: "Last %{count} hours"

ai_bot:
  cancel_streaming: "Stop reply"
  default_pm_prefix: "[Untitled AI bot PM]"
@@ -52,11 +52,8 @@ en:
ai_embeddings_semantic_search_enabled: "Enable full-page semantic search."
ai_embeddings_semantic_related_include_closed_topics: "Include closed topics in semantic search results"

ai_summarization_enabled: "Enable the summarization module."
ai_summarization_discourse_service_api_endpoint: "URL where the Discourse summarization API is running."
ai_summarization_discourse_service_api_key: "API key for the Discourse summarization API."
ai_summarization_model: "Model to use for summarization."
ai_summarization_rate_limit_minutes: "Minutes to elapse after the summarization limit is reached (6 requests)."

ai_bot_enabled: "Enable the AI Bot module."
ai_bot_allowed_groups: "When the GPT Bot has access to the PM, it will reply to members of these groups."
@@ -116,3 +113,8 @@ en:
google:
  one: "Found %{count} <a href='%{url}'>result</a> for '%{query}'"
  other: "Found %{count} <a href='%{url}'>results</a> for '%{query}'"

summarization:
  configuration_hint:
    one: "Configure the `%{setting}` setting first."
    other: "Configure these settings first: %{settings}"
@@ -10,10 +10,6 @@ DiscourseAi::Engine.routes.draw do
    get "semantic-search" => "embeddings#search"
  end

  scope module: :summarization, path: "/summarization", defaults: { format: :json } do
    post "summary" => "summary#show"
  end

  scope module: :ai_bot, path: "/ai-bot", defaults: { format: :json } do
    post "post/:post_id/stop-streaming" => "bot#stop_streaming_response"
    get "bot-username" => "bot#show_bot_username"
@@ -182,26 +182,10 @@ plugins:
    default: false
    client: true

  ai_summarization_enabled:
    default: false
    client: true
  ai_summarization_discourse_service_api_endpoint: ""
  ai_summarization_discourse_service_api_key:
    default: ""
    secret: true
  ai_summarization_model:
    type: enum
    default: "bart-large-cnn-samsum"
    allow_any: false
    choices:
      - bart-large-cnn-samsum
      - flan-t5-base-samsum
      - long-t5-tglobal-base-16384-book-summary
      - gpt-3.5-turbo
      - gpt-4
      - claude-v1
      - claude-v1-100k
  ai_summarization_rate_limit_minutes: 10

  ai_bot_enabled:
    default: false
@@ -4,10 +4,21 @@ module DiscourseAi
  module Summarization
    class EntryPoint
      def load_files
        require_relative "summary_generator"
        require_relative "strategies/anthropic"
        require_relative "strategies/discourse_ai"
        require_relative "strategies/open_ai"
      end

      def inject_into(plugin)
        [
          Strategies::OpenAi.new("gpt-4"),
          Strategies::OpenAi.new("gpt-3.5-turbo"),
          Strategies::DiscourseAi.new("bart-large-cnn-samsum"),
          Strategies::DiscourseAi.new("flan-t5-base-samsum"),
          Strategies::DiscourseAi.new("long-t5-tglobal-base-16384-book-summary"),
          Strategies::Anthropic.new("claude-v1"),
          Strategies::Anthropic.new("claude-v1-100k"),
        ].each { |strategy| plugin.register_summarization_strategy(strategy) }
      end
    end
  end
@@ -0,0 +1,57 @@
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    module Strategies
      class Anthropic < ::Summarization::Base
        def display_name
          "Anthropic's #{model}"
        end

        def correctly_configured?
          SiteSetting.ai_anthropic_api_key.present?
        end

        def configuration_hint
          I18n.t(
            "discourse_ai.summarization.configuration_hint",
            count: 1,
            setting: "ai_anthropic_api_key",
          )
        end

        def summarize(content_text)
          response =
            ::DiscourseAi::Inference::AnthropicCompletions.perform!(
              prompt(content_text),
              model,
            ).dig(:completion)

          Nokogiri::HTML5.fragment(response).at("ai").text
        end

        def prompt(content)
          truncated_content =
            ::DiscourseAi::Tokenizer::AnthropicTokenizer.truncate(content, max_length - 50)

          "Human: Summarize the following article that is inside <input> tags.
          Please include only the summary inside <ai> tags.

          <input>##{truncated_content}</input>


          Assistant:
          "
        end

        private

        def max_length
          lengths = { "claude-v1" => 9000, "claude-v1-100k" => 100_000 }

          lengths[model]
        end
      end
    end
  end
end
@@ -0,0 +1,52 @@
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    module Strategies
      class DiscourseAi < ::Summarization::Base
        def display_name
          "Discourse AI's #{model}"
        end

        def correctly_configured?
          SiteSetting.ai_summarization_discourse_service_api_endpoint.present? &&
            SiteSetting.ai_summarization_discourse_service_api_key.present?
        end

        def configuration_hint
          I18n.t(
            "discourse_ai.summarization.configuration_hint",
            count: 2,
            settings:
              "ai_summarization_discourse_service_api_endpoint, ai_summarization_discourse_service_api_key",
          )
        end

        def summarize(content_text)
          ::DiscourseAi::Inference::DiscourseClassifier.perform!(
            "#{SiteSetting.ai_summarization_discourse_service_api_endpoint}/api/v1/classify",
            model,
            prompt(content_text),
            SiteSetting.ai_summarization_discourse_service_api_key,
          ).dig(:summary_text)
        end

        def prompt(text)
          ::DiscourseAi::Tokenizer::BertTokenizer.truncate(text, max_length)
        end

        private

        def max_length
          lengths = {
            "bart-large-cnn-samsum" => 1024,
            "flan-t5-base-samsum" => 512,
            "long-t5-tglobal-base-16384-book-summary" => 16_384,
          }

          lengths[model]
        end
      end
    end
  end
end
@@ -0,0 +1,51 @@
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    module Strategies
      class OpenAi < ::Summarization::Base
        def display_name
          "Open AI's #{model}"
        end

        def correctly_configured?
          SiteSetting.ai_openai_api_key.present?
        end

        def configuration_hint
          I18n.t(
            "discourse_ai.summarization.configuration_hint",
            count: 1,
            setting: "ai_openai_api_key",
          )
        end

        def summarize(content_text)
          ::DiscourseAi::Inference::OpenAiCompletions.perform!(prompt(content_text), model).dig(
            :choices,
            0,
            :message,
            :content,
          )
        end

        def prompt(content)
          truncated_content =
            ::DiscourseAi::Tokenizer::OpenAiTokenizer.truncate(content, max_length - 50)

          messages = [{ role: "system", content: <<~TEXT }]
            Summarize the following article:\n\n#{truncated_content}
          TEXT
        end

        private

        def max_length
          lengths = { "gpt-3.5-turbo" => 4096, "gpt-4" => 8192 }

          lengths[model]
        end
      end
    end
  end
end
@@ -1,131 +0,0 @@
# frozen_string_literal: true

module DiscourseAi
  module Summarization
    class SummaryGenerator
      def initialize(target, user)
        @target = target
        @user = user
      end

      def summarize!(content_since)
        content = get_content(content_since)

        send("#{summarization_provider}_summarization", content)
      end

      private

      attr_reader :target, :user

      def summarization_provider
        case model
        in "gpt-3.5-turbo" | "gpt-4"
          "openai"
        in "claude-v1" | "claude-v1-100k"
          "anthropic"
        else
          "discourse"
        end
      end

      def get_content(content_since)
        case target
        in Post
          target.raw
        in Topic
          TopicView
            .new(
              target,
              user,
              {
                filter: "summary",
                exclude_deleted_users: true,
                exclude_hidden: true,
                show_deleted: false,
              },
            )
            .posts
            .pluck(:raw)
            .join("\n")
        in ::Chat::Channel
          target
            .chat_messages
            .where("chat_messages.created_at > ?", content_since.hours.ago)
            .includes(:user)
            .order(created_at: :asc)
            .pluck(:username_lower, :message)
            .map { "#{_1}: #{_2}" }
            .join("\n")
        else
          raise "Can't find content to summarize"
        end
      end

      def discourse_summarization(content)
        truncated_content = DiscourseAi::Tokenizer::BertTokenizer.truncate(content, max_length)

        ::DiscourseAi::Inference::DiscourseClassifier.perform!(
          "#{SiteSetting.ai_summarization_discourse_service_api_endpoint}/api/v1/classify",
          model,
          truncated_content,
          SiteSetting.ai_summarization_discourse_service_api_key,
        ).dig(:summary_text)
      end

      def openai_summarization(content)
        truncated_content =
          DiscourseAi::Tokenizer::OpenAiTokenizer.truncate(content, max_length - 50)

        messages = [{ role: "system", content: <<~TEXT }]
          Summarize the following article:\n\n#{truncated_content}
        TEXT

        ::DiscourseAi::Inference::OpenAiCompletions.perform!(messages, model).dig(
          :choices,
          0,
          :message,
          :content,
        )
      end

      def anthropic_summarization(content)
        truncated_content =
          DiscourseAi::Tokenizer::AnthropicTokenizer.truncate(content, max_length - 50)

        messages =
          "Human: Summarize the following article that is inside <input> tags.
          Please include only the summary inside <ai> tags.

          <input>##{truncated_content}</input>


          Assistant:
          "

        response =
          ::DiscourseAi::Inference::AnthropicCompletions.perform!(messages, model).dig(:completion)

        Nokogiri::HTML5.fragment(response).at("ai").text
      end

      def model
        SiteSetting.ai_summarization_model
      end

      def max_length
        lengths = {
          "bart-large-cnn-samsum" => 1024,
          "flan-t5-base-samsum" => 512,
          "long-t5-tglobal-base-16384-book-summary" => 16_384,
          "gpt-3.5-turbo" => 4096,
          "gpt-4" => 8192,
          "claude-v1" => 9000,
          "claude-v1-100k" => 100_000,
        }

        lengths[model]
      end
    end
  end
end
@@ -14,10 +14,6 @@ enabled_site_setting :discourse_ai_enabled

register_asset "stylesheets/modules/ai-helper/common/ai-helper.scss"

register_asset "stylesheets/modules/summarization/common/summarization.scss"
register_asset "stylesheets/modules/summarization/desktop/summarization.scss", :desktop
register_asset "stylesheets/modules/summarization/mobile/summarization.scss", :mobile

register_asset "stylesheets/modules/ai-bot/common/bot-replies.scss"

module ::DiscourseAi
@@ -0,0 +1,26 @@
# frozen_string_literal: true

require_relative "../../../../support/anthropic_completion_stubs"

RSpec.describe DiscourseAi::Summarization::Strategies::Anthropic do
  describe "#summarize" do
    let(:model) { "claude-v1" }

    subject { described_class.new(model) }

    it "asks an Anthropic's model to summarize the content" do
      summarization_text = "This is a text"
      expected_response = "This is a summary"

      AnthropicCompletionStubs.stub_response(
        subject.prompt(summarization_text),
        "<ai>#{expected_response}</ai>",
        req_opts: {
          max_tokens_to_sample: 300,
        },
      )

      expect(subject.summarize(summarization_text)).to eq(expected_response)
    end
  end
end
@@ -0,0 +1,25 @@
# frozen_string_literal: true

RSpec.describe DiscourseAi::Summarization::Strategies::DiscourseAi do
  describe "#summarize" do
    let(:model) { "bart-large-cnn-samsum" }

    subject { described_class.new(model) }

    it "asks a Discourse's model to summarize the content" do
      SiteSetting.ai_summarization_discourse_service_api_endpoint = "https://test.com"
      summarization_text = "This is a text"
      expected_response = "This is a summary"

      WebMock
        .stub_request(
          :post,
          "#{SiteSetting.ai_summarization_discourse_service_api_endpoint}/api/v1/classify",
        )
        .with(body: JSON.dump(model: model, content: subject.prompt(summarization_text)))
        .to_return(status: 200, body: JSON.dump(summary_text: expected_response))

      expect(subject.summarize(summarization_text)).to eq(expected_response)
    end
  end
end
@@ -0,0 +1,21 @@
# frozen_string_literal: true

require_relative "../../../../support/openai_completions_inference_stubs"

RSpec.describe DiscourseAi::Summarization::Strategies::OpenAi do
  let(:model) { "gpt-3.5-turbo" }

  subject { described_class.new(model) }

  it "asks a OpenAI's model to summarize the content" do
    summarization_text = "This is a text"
    expected_response = "This is a summary"

    OpenAiCompletionsInferenceStubs.stub_response(
      subject.prompt(summarization_text),
      expected_response,
    )

    expect(subject.summarize(summarization_text)).to eq(expected_response)
  end
end
@@ -1,72 +0,0 @@
# frozen_string_literal: true

RSpec.describe DiscourseAi::Summarization::SummaryController do
  describe "#show" do
    fab!(:user) { Fabricate(:user) }
    let!(:channel_group) { Fabricate(:group) }
    let!(:chat_channel) { Fabricate(:private_category_channel, group: channel_group) }

    before do
      SiteSetting.ai_summarization_enabled = true
      sign_in(user)
    end

    context "when summarizing a chat channel" do
      context "if the user can see the channel" do
        before { channel_group.add(user) }

        describe "validating inputs" do
          it "returns a 404 if there is no chat channel" do
            post "/discourse-ai/summarization/summary",
                 params: {
                   target_type: "chat_channel",
                   target_id: 99,
                   since: 3,
                 }

            expect(response.status).to eq(404)
          end

          it "returns a 400 if the since param is invalid" do
            post "/discourse-ai/summarization/summary",
                 params: {
                   target_type: "chat_channel",
                   target_id: chat_channel.id,
                   since: 0,
                 }

            expect(response.status).to eq(400)
          end

          it "returns a 404 when the module is disabled" do
            SiteSetting.ai_summarization_enabled = false

            post "/discourse-ai/summarization/summary",
                 params: {
                   target_type: "chat_channel",
                   target_id: chat_channel.id,
                   since: 1,
                 }

            expect(response.status).to eq(404)
          end
        end

        context "if the user can't see the channel" do
          before { channel_group.remove(user) }

          it "returns a 403 if the user can't see the chat channel" do
            post "/discourse-ai/summarization/summary",
                 params: {
                   target_type: "chat_channel",
                   target_id: chat_channel.id,
                   since: 1,
                 }

            expect(response.status).to eq(403)
          end
        end
      end
    end
  end
end
@@ -1,56 +0,0 @@
# frozen_string_literal: true

class SummarizationStubs
  class << self
    def test_summary
      "This is a summary"
    end

    def openai_response(content)
      {
        id: "chatcmpl-6sZfAb30Rnv9Q7ufzFwvQsMpjZh8S",
        object: "chat.completion",
        created: 1_678_464_820,
        model: "gpt-3.5-turbo-0301",
        usage: {
          prompt_tokens: 337,
          completion_tokens: 162,
          total_tokens: 499,
        },
        choices: [
          { message: { role: "assistant", content: content }, finish_reason: "stop", index: 0 },
        ],
      }
    end

    def openai_chat_summarization_stub(chat_messages)
      prompt_messages =
        chat_messages
          .sort_by(&:created_at)
          .map { |m| "#{m.user.username_lower}: #{m.message}" }
          .join("\n")

      summary_prompt = [{ role: "system", content: <<~TEXT }]
        Summarize the following article:\n\n#{prompt_messages}
      TEXT

      WebMock
        .stub_request(:post, "https://api.openai.com/v1/chat/completions")
        .with(body: { model: "gpt-4", messages: summary_prompt }.to_json)
        .to_return(status: 200, body: JSON.dump(openai_response(test_summary)))
    end

    def openai_topic_summarization_stub(topic, user)
      prompt_posts = TopicView.new(topic, user, { filter: "summary" }).posts.map(&:raw).join("\n")

      summary_prompt = [{ role: "system", content: <<~TEXT }]
        Summarize the following article:\n\n#{prompt_posts}
      TEXT

      WebMock
        .stub_request(:post, "https://api.openai.com/v1/chat/completions")
        .with(body: { model: "gpt-4", messages: summary_prompt }.to_json)
        .to_return(status: 200, body: JSON.dump(openai_response(test_summary)))
    end
  end
end
@@ -1,24 +0,0 @@
# frozen_string_literal: true

module PageObjects
  module Modals
    class Summarization < PageObjects::Modals::Base
      def visible?
        page.has_css?(".ai-summary-modal", wait: 5)
      end

      def select_timeframe(option)
        find(".summarization-since").click
        find(".select-kit-row[data-value=\"#{option}\"]").click
      end

      def summary_value
        find(".summary-area").value
      end

      def generate_summary
        find(".ai-summary-modal .create").click
      end
    end
  end
end
@@ -1,36 +0,0 @@
# frozen_string_literal: true

require_relative "../../support/summarization_stubs"

RSpec.describe "AI chat channel summarization", type: :system, js: true do
  fab!(:user) { Fabricate(:leader) }
  fab!(:channel) { Fabricate(:chat_channel) }

  fab!(:message_1) { Fabricate(:chat_message, chat_channel: channel) }
  fab!(:message_2) { Fabricate(:chat_message, chat_channel: channel) }
  fab!(:message_3) { Fabricate(:chat_message, chat_channel: channel) }

  before do
    sign_in(user)
    chat_system_bootstrap(user, [channel])
    SiteSetting.ai_summarization_enabled = true
    SiteSetting.ai_summarization_model = "gpt-4"
  end

  let(:summarization_modal) { PageObjects::Modals::Summarization.new }

  it "returns a summary using the selected timeframe" do
    visit("/chat/c/-/#{channel.id}")

    SummarizationStubs.openai_chat_summarization_stub([message_1, message_2, message_3])

    find(".chat-composer-dropdown__trigger-btn").click
    find(".chat-composer-dropdown__action-btn.chat_channel_summary").click

    expect(summarization_modal).to be_visible

    summarization_modal.select_timeframe("3")

    expect(summarization_modal.summary_value).to eq(SummarizationStubs.test_summary)
  end
end
@@ -1,33 +0,0 @@
# frozen_string_literal: true

require_relative "../../support/summarization_stubs"

RSpec.describe "AI chat channel summarization", type: :system, js: true do
  fab!(:user) { Fabricate(:leader) }
  fab!(:topic) { Fabricate(:topic, has_summary: true) }

  fab!(:post_1) { Fabricate(:post, topic: topic) }
  fab!(:post_2) { Fabricate(:post, topic: topic) }

  before do
    sign_in(user)
    SiteSetting.ai_summarization_enabled = true
    SiteSetting.ai_summarization_model = "gpt-4"
  end

  let(:summarization_modal) { PageObjects::Modals::Summarization.new }

  it "returns a summary using the selected timeframe" do
    visit("/t/-/#{topic.id}")

    SummarizationStubs.openai_topic_summarization_stub(topic, user)

    find(".topic-ai-summarization").click

    expect(summarization_modal).to be_visible

    summarization_modal.generate_summary

    expect(summarization_modal.summary_value).to eq(SummarizationStubs.test_summary)
  end
end