FEATURE: support topic_id and post_id logging in ai audit log (#274)

This makes it easier to track which post and topic a completion belongs to,
and therefore who is responsible for it, when reviewing the logs.

Note: topic/post logging is not yet wired up for the AI helper and summarization.
Sam authored 2023-11-01 08:41:31 +11:00, committed by GitHub
parent c6d5c56033
commit fc65404896
8 changed files with 38 additions and 5 deletions
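
For context, a rough sketch of how the new columns are meant to be used. The
first call mirrors the bot's reply flow as changed in this diff; the queries
below it are ordinary ActiveRecord lookups against the audit log model and are
illustrative, not part of the change:

# Inside the bot's reply flow, the post is now threaded through:
submit_prompt(prompt, post: post, prefer_low_cost: prefer_low_cost) do |partial, cancel|
  # stream handling as before
end

# Afterwards, audit rows can be traced back to the conversation:
AiApiAuditLog.find_by(post_id: post.id)
AiApiAuditLog.where(topic_id: post.topic_id).order(:created_at)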

@@ -15,6 +15,8 @@ end
 #  id                   :bigint           not null, primary key
 #  provider_id          :integer          not null
 #  user_id              :integer
+#  topic_id             :integer
+#  post_id              :integer
 #  request_tokens       :integer
 #  response_tokens      :integer
 #  raw_request_payload  :string

@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class AddTopicIdPostIdToAiAuditLog < ActiveRecord::Migration[7.0]
+  def change
+    add_column :ai_api_audit_logs, :topic_id, :integer
+    add_column :ai_api_audit_logs, :post_id, :integer
+  end
+end
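
A quick way to sanity-check the migration from a dev console (the commands are
illustrative and assume a standard Discourse development install; they are not
part of this change):

# After `bin/rails db:migrate`, the columns exist and default to NULL:
AiApiAuditLog.column_names.include?("topic_id") # => true
AiApiAuditLog.column_names.include?("post_id")  # => true
AiApiAuditLog.new.post_id                       # => nil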

@@ -53,12 +53,13 @@ module DiscourseAi
       ).dig(:completion)
     end

-    def submit_prompt(prompt, prefer_low_cost: false, &blk)
+    def submit_prompt(prompt, post: nil, prefer_low_cost: false, &blk)
       DiscourseAi::Inference::AnthropicCompletions.perform!(
         prompt,
         model_for,
         temperature: 0.4,
         max_tokens: 3000,
+        post: post,
         &blk
       )
     end

@@ -137,7 +137,7 @@ module DiscourseAi
       context = {}
       functions = FunctionCalls.new

-      submit_prompt(prompt, prefer_low_cost: prefer_low_cost) do |partial, cancel|
+      submit_prompt(prompt, post: post, prefer_low_cost: prefer_low_cost) do |partial, cancel|
         current_delta = get_delta(partial, context)
         partial_reply << current_delta
@@ -335,7 +335,14 @@ module DiscourseAi
       tokenizer.tokenize(text)
     end

-    def submit_prompt(prompt, prefer_low_cost: false, &blk)
+    def submit_prompt(
+      prompt,
+      post:,
+      prefer_low_cost: false,
+      temperature: nil,
+      max_tokens: nil,
+      &blk
+    )
       raise NotImplemented
     end
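
Note the base class now declares `post:` as a required keyword while the
concrete bots default it to nil. A minimal sketch of a conforming subclass,
modelled on the FakeBot test double further down in this diff (the class name
and canned reply are made up for illustration):

class EchoBot < DiscourseAi::AiBot::Bot
  # post: must be accepted to match the new calling convention; a bot that
  # never talks to a real API can simply ignore it.
  def submit_prompt(prompt, post: nil, prefer_low_cost: false, &blk)
    blk.call("echo", -> {})
  end
end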

@@ -41,6 +41,7 @@ module DiscourseAi
     def submit_prompt(
       prompt,
       prefer_low_cost: false,
+      post: nil,
       temperature: nil,
       top_p: nil,
       max_tokens: nil,
@@ -57,7 +58,13 @@ module DiscourseAi
       params[:functions] = available_functions if available_functions.present?

-      DiscourseAi::Inference::OpenAiCompletions.perform!(prompt, model, **params, &blk)
+      DiscourseAi::Inference::OpenAiCompletions.perform!(
+        prompt,
+        model,
+        **params,
+        post: post,
+        &blk
+      )
     end

     def tokenizer

@@ -14,6 +14,7 @@ module ::DiscourseAi
       max_tokens: nil,
       user_id: nil,
       stop_sequences: nil,
+      post: nil,
       &blk
     )
       # HACK to get around the fact that they have different APIs
@@ -78,6 +79,8 @@ module ::DiscourseAi
           provider_id: AiApiAuditLog::Provider::Anthropic,
           raw_request_payload: request_body,
           user_id: user_id,
+          post_id: post&.id,
+          topic_id: post&.topic_id,
         )

       if !block_given?

@@ -20,6 +20,7 @@ module ::DiscourseAi
       user_id: nil,
       retries: DEFAULT_RETRIES,
       retry_timeout: DEFAULT_RETRY_TIMEOUT_SECONDS,
+      post: nil,
       &blk
     )
       log = nil
@@ -103,6 +104,8 @@ module ::DiscourseAi
           provider_id: AiApiAuditLog::Provider::OpenAI,
           raw_request_payload: request_body,
           user_id: user_id,
+          post_id: post&.id,
+          topic_id: post&.topic_id,
         )

       if !blk
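
Both inference classes also accept the post when called directly; a sketch of
such a call (the prompt and model name are placeholders, and the remaining
keywords keep their defaults):

DiscourseAi::Inference::OpenAiCompletions.perform!(
  [{ role: "user", content: "Hello" }], # placeholder prompt
  "gpt-3.5-turbo",                      # placeholder model name
  post: post, # a Post record; the audit row stores post.id and post.topic_id
)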

@@ -23,7 +23,7 @@ class FakeBot < DiscourseAi::AiBot::Bot
     "#{role}: #{content}"
   end

-  def submit_prompt(prompt, prefer_low_cost: false)
+  def submit_prompt(prompt, post: nil, prefer_low_cost: false)
     rows = @responses.shift
     rows.each { |data| yield data, lambda {} }
   end
@@ -173,6 +173,8 @@ describe DiscourseAi::AiBot::Bot do
       expect(last.post_custom_prompt.custom_prompt).to eq(
         [[result, "search", "function"], ["I found nothing, sorry", bot_user.username]],
       )

+      log = AiApiAuditLog.find_by(post_id: second_post.id)
+      expect(log).to be_present
     end
   end
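
The spec asserts only post_id; topic_id could be covered the same way if
desired (a sketch, not part of this change):

log = AiApiAuditLog.find_by(post_id: second_post.id)
expect(log).to be_present
expect(log.topic_id).to eq(second_post.topic_id)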