FEATURE: support sending AI report to an email address (#368)

Support emailing the AI report to any arbitrary email address
Sam 2023-12-19 17:51:49 +11:00 committed by GitHub
parent d0f54443ae
commit 529703b5ec
5 changed files with 103 additions and 14 deletions

View File

@ -0,0 +1,9 @@
# frozen_string_literal: true
class AiReportMailer < ActionMailer::Base
include Email::BuildEmailHelper
def send_report(to_address, opts = {})
build_email(to_address, **opts)
end
end
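
For context, the report runner further down in this commit drives the new mailer roughly as follows; the address, subject, and body here are placeholder values, not part of the commit:

```ruby
# Sketch of how AiReportMailer is invoked (see report_runner.rb below).
# "user@example.com" and the subject/body strings are placeholders.
message = ::AiReportMailer.send_report(
  "user@example.com",
  subject: "Weekly forum report",
  body: "…generated report markdown…",
)
Email::Sender.new(message, :ai_report).send
```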

View File

@ -17,7 +17,6 @@ module DiscourseAi
- Markdown Usage: Enhance readability with **bold**, *italic*, and > quotes.
- Linking: Use `#{Discourse.base_url}/t/-/TOPIC_ID/POST_NUMBER` for direct references.
- User Mentions: Reference users with @USERNAME
- Context tips: Staff are denoted with Username *. For example: jane * means that jane is a staff member. Do not render the * in the report.
- Add many topic links: strive to link to at least 30 topics in the report. Topic Id is meaningless to end users if you need to throw in a link use [ref](...) or better still just embed it into the [sentence](...)
- Categories and tags: use the format #TAG and #CATEGORY to denote tags and categories
@ -52,6 +51,7 @@ module DiscourseAi
)
@sender = User.find_by(username: sender_username)
@receivers = User.where(username: receivers)
@email_receivers = receivers.filter { |r| r.include? "@" }
@title = title
@model = model
@ -70,6 +70,14 @@ module DiscourseAi
def run!
start_date = (@offset + @days).days.ago
end_date = start_date + @days.days
@title =
@title.gsub(
"%DATE%",
start_date.strftime("%Y-%m-%d") + " - " + end_date.strftime("%Y-%m-%d"),
)
prioritized_group_ids = [@priority_group_id] if @priority_group_id.present?
context =
DiscourseAi::Automation::ReportContextGenerator.generate(
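
The lines added above expand a %DATE% placeholder in the report title into the covered date range. A minimal sketch of the behaviour in a Rails console, with illustrative dates matching the spec further down:

```ruby
# Minimal sketch of the %DATE% expansion (dates are illustrative).
title = "test report %DATE%"
start_date = 7.days.ago            # offset 0, days 7
end_date = start_date + 7.days
title = title.gsub(
  "%DATE%",
  start_date.strftime("%Y-%m-%d") + " - " + end_date.strftime("%Y-%m-%d"),
)
# => e.g. "test report 2023-12-12 - 2023-12-19" for a run on 2023-12-19
```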
@ -113,13 +121,16 @@ module DiscourseAi
result << response
end
receiver_usernames = @receivers.map(&:username).join(",")
if receiver_usernames.present?
post =
PostCreator.create!(
@sender,
raw: result,
title: @title,
archetype: Archetype.private_message,
target_usernames: receiver_usernames,
skip_validations: true,
)
@ -127,6 +138,7 @@ module DiscourseAi
input = input.split("\n").map { |line| " #{line}" }.join("\n")
raw = <<~RAW
```
tokens: #{@llm.tokenizer.tokenize(input).length}
start_date: #{start_date},
duration: #{@days.days},
max_posts: #{@sample_size},
@ -141,6 +153,16 @@ module DiscourseAi
PostCreator.create!(@sender, raw: raw, topic_id: post.topic_id, skip_validations: true)
end
end
if @email_receivers.present?
@email_receivers.each do |to_address|
Email::Sender.new(
::AiReportMailer.send_report(to_address, subject: @title, body: result),
:ai_report,
).send
end
end
end
end
end
end
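
Taken together, the runner now splits the automation's receivers list on the presence of an "@": plain usernames still get the report as a private message, while email addresses are handed to AiReportMailer. A rough illustration, inside the runner's context, with made-up receiver values:

```ruby
# Rough illustration of the receiver split (receiver values are made up).
receivers = ["sam", "fake@discourse.com"]

email_receivers = receivers.filter { |r| r.include?("@") }
# => ["fake@discourse.com"] — each address is emailed the report via Email::Sender

user_receivers = User.where(username: receivers)
# => matching user records only — they receive the report as a private message
```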

View File

@ -78,6 +78,9 @@ module DiscourseAi
def extract_completion_from(response_raw)
parsed = JSON.parse(response_raw, symbolize_names: true).dig(:choices, 0)
# half a line sent here
return if !parsed
response_h = @streaming_mode ? parsed.dig(:delta) : parsed.dig(:message)
has_function_call = response_h.dig(:tool_calls).present?
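
The new `return if !parsed` guard bails out when a streamed chunk yields no choice entry (the "half a line" case noted in the comment), rather than calling `dig` on nil further down. A small illustration with a made-up payload:

```ruby
require "json"

# Made-up fragment: a chunk whose "choices" array is empty.
parsed = JSON.parse('{"choices":[]}', symbolize_names: true).dig(:choices, 0)
# => nil — without the early return, the later parsed.dig(:delta)
#    call would raise NoMethodError on nil.
```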

View File

@ -14,6 +14,34 @@ module DiscourseAi
fab!(:secure_post) { Fabricate(:post, raw: "Top secret date !!!!", topic: secure_topic) }
describe "#run!" do
it "is able to generate email reports" do
freeze_time
DiscourseAi::Completions::Llm.with_prepared_responses(["magical report"]) do
ReportRunner.run!(
sender_username: user.username,
receivers: ["fake@discourse.com"],
title: "test report %DATE%",
model: "gpt-4",
category_ids: nil,
tags: nil,
allow_secure_categories: false,
sample_size: 100,
instructions: "make a magic report",
days: 7,
offset: 0,
priority_group_id: nil,
tokens_per_post: 150,
debug_mode: nil,
)
end
expect(ActionMailer::Base.deliveries.length).to eq(1)
expect(ActionMailer::Base.deliveries.first.subject).to eq(
"test report #{7.days.ago.strftime("%Y-%m-%d")} - #{Time.zone.now.strftime("%Y-%m-%d")}",
)
end
it "generates correctly respects the params" do it "generates correctly respects the params" do
DiscourseAi::Completions::Llm.with_prepared_responses(["magical report"]) do DiscourseAi::Completions::Llm.with_prepared_responses(["magical report"]) do
ReportRunner.run!( ReportRunner.run!(

View File

@ -4,6 +4,8 @@ require "rails_helper"
describe DiscourseAi::Inference::OpenAiCompletions do
before { SiteSetting.ai_openai_api_key = "abc-123" }
fab!(:user)
it "supports sending an organization id" do it "supports sending an organization id" do
SiteSetting.ai_openai_organization = "org_123" SiteSetting.ai_openai_organization = "org_123"
@ -302,6 +304,31 @@ describe DiscourseAi::Inference::OpenAiCompletions do
restore_net_http
end
it "supports extremely slow streaming under new interface" do
raw_data = <<~TEXT
data: {"choices":[{"delta":{"content":"test"}}]}
data: {"choices":[{"delta":{"content":"test1"}}]}
data: {"choices":[{"delta":{"content":"test2"}}]}
data: [DONE]
TEXT
chunks = raw_data.split("")
stub_request(:post, "https://api.openai.com/v1/chat/completions").to_return(
status: 200,
body: chunks,
)
partials = []
llm = DiscourseAi::Completions::Llm.proxy("gpt-3.5-turbo")
llm.completion!({ insts: "test" }, user) { |partial| partials << partial }
expect(partials.join).to eq("testtest1test2")
end
it "support extremely slow streaming" do it "support extremely slow streaming" do
raw_data = <<~TEXT raw_data = <<~TEXT
data: {"choices":[{"delta":{"content":"test"}}]} data: {"choices":[{"delta":{"content":"test"}}]}