From fc6540489698e581a5b2ac338cb2298210a949a7 Mon Sep 17 00:00:00 2001
From: Sam
Date: Wed, 1 Nov 2023 08:41:31 +1100
Subject: [PATCH] FEATURE: support topic_id and post_id logging in ai audit log (#274)

This makes it easier to track who is responsible for a completion in logs

Note: ai helper and summarization are not yet implemented
---
 app/models/ai_api_audit_log.rb                         |  2 ++
 ...1031050538_add_topic_id_post_id_to_ai_audit_log.rb  |  8 ++++++++
 lib/modules/ai_bot/anthropic_bot.rb                    |  3 ++-
 lib/modules/ai_bot/bot.rb                              | 11 +++++++++--
 lib/modules/ai_bot/open_ai_bot.rb                      |  9 ++++++++-
 lib/shared/inference/anthropic_completions.rb          |  3 +++
 lib/shared/inference/openai_completions.rb             |  3 +++
 spec/lib/modules/ai_bot/bot_spec.rb                    |  4 +++-
 8 files changed, 38 insertions(+), 5 deletions(-)
 create mode 100644 db/migrate/20231031050538_add_topic_id_post_id_to_ai_audit_log.rb

diff --git a/app/models/ai_api_audit_log.rb b/app/models/ai_api_audit_log.rb
index 32600282..f7cabefc 100644
--- a/app/models/ai_api_audit_log.rb
+++ b/app/models/ai_api_audit_log.rb
@@ -15,6 +15,8 @@ end
 #  id                   :bigint           not null, primary key
 #  provider_id          :integer          not null
 #  user_id              :integer
+#  topic_id             :integer
+#  post_id              :integer
 #  request_tokens       :integer
 #  response_tokens      :integer
 #  raw_request_payload  :string
diff --git a/db/migrate/20231031050538_add_topic_id_post_id_to_ai_audit_log.rb b/db/migrate/20231031050538_add_topic_id_post_id_to_ai_audit_log.rb
new file mode 100644
index 00000000..9f91cf59
--- /dev/null
+++ b/db/migrate/20231031050538_add_topic_id_post_id_to_ai_audit_log.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class AddTopicIdPostIdToAiAuditLog < ActiveRecord::Migration[7.0]
+  def change
+    add_column :ai_api_audit_logs, :topic_id, :integer
+    add_column :ai_api_audit_logs, :post_id, :integer
+  end
+end
diff --git a/lib/modules/ai_bot/anthropic_bot.rb b/lib/modules/ai_bot/anthropic_bot.rb
index 364e9df8..0a3e71a2 100644
--- a/lib/modules/ai_bot/anthropic_bot.rb
+++ b/lib/modules/ai_bot/anthropic_bot.rb
@@ -53,12 +53,13 @@ module DiscourseAi
         ).dig(:completion)
       end
 
-      def submit_prompt(prompt, prefer_low_cost: false, &blk)
+      def submit_prompt(prompt, post: nil, prefer_low_cost: false, &blk)
         DiscourseAi::Inference::AnthropicCompletions.perform!(
           prompt,
           model_for,
           temperature: 0.4,
           max_tokens: 3000,
+          post: post,
           &blk
         )
       end
diff --git a/lib/modules/ai_bot/bot.rb b/lib/modules/ai_bot/bot.rb
index bd6c00ed..52594b9b 100644
--- a/lib/modules/ai_bot/bot.rb
+++ b/lib/modules/ai_bot/bot.rb
@@ -137,7 +137,7 @@ module DiscourseAi
         context = {}
         functions = FunctionCalls.new
 
-        submit_prompt(prompt, prefer_low_cost: prefer_low_cost) do |partial, cancel|
+        submit_prompt(prompt, post: post, prefer_low_cost: prefer_low_cost) do |partial, cancel|
           current_delta = get_delta(partial, context)
           partial_reply << current_delta
 
@@ -335,7 +335,14 @@ module DiscourseAi
         tokenizer.tokenize(text)
       end
 
-      def submit_prompt(prompt, prefer_low_cost: false, &blk)
+      def submit_prompt(
+        prompt,
+        post:,
+        prefer_low_cost: false,
+        temperature: nil,
+        max_tokens: nil,
+        &blk
+      )
         raise NotImplemented
       end
 
diff --git a/lib/modules/ai_bot/open_ai_bot.rb b/lib/modules/ai_bot/open_ai_bot.rb
index 3a6ae7b2..42e4da8b 100644
--- a/lib/modules/ai_bot/open_ai_bot.rb
+++ b/lib/modules/ai_bot/open_ai_bot.rb
@@ -41,6 +41,7 @@ module DiscourseAi
       def submit_prompt(
         prompt,
         prefer_low_cost: false,
+        post: nil,
         temperature: nil,
         top_p: nil,
         max_tokens: nil,
@@ -57,7 +58,13 @@ module DiscourseAi
 
         params[:functions] = available_functions if available_functions.present?
 
-        DiscourseAi::Inference::OpenAiCompletions.perform!(prompt, model, **params, &blk)
+        DiscourseAi::Inference::OpenAiCompletions.perform!(
+          prompt,
+          model,
+          **params,
+          post: post,
+          &blk
+        )
       end
 
       def tokenizer
diff --git a/lib/shared/inference/anthropic_completions.rb b/lib/shared/inference/anthropic_completions.rb
index 329c69aa..1c7c17ef 100644
--- a/lib/shared/inference/anthropic_completions.rb
+++ b/lib/shared/inference/anthropic_completions.rb
@@ -14,6 +14,7 @@ module ::DiscourseAi
         max_tokens: nil,
         user_id: nil,
         stop_sequences: nil,
+        post: nil,
         &blk
       )
         # HACK to get around the fact that they have different APIs
@@ -78,6 +79,8 @@ module ::DiscourseAi
             provider_id: AiApiAuditLog::Provider::Anthropic,
             raw_request_payload: request_body,
             user_id: user_id,
+            post_id: post&.id,
+            topic_id: post&.topic_id,
           )
 
         if !block_given?
diff --git a/lib/shared/inference/openai_completions.rb b/lib/shared/inference/openai_completions.rb
index 2bfa0ca0..3c67412c 100644
--- a/lib/shared/inference/openai_completions.rb
+++ b/lib/shared/inference/openai_completions.rb
@@ -20,6 +20,7 @@ module ::DiscourseAi
         user_id: nil,
         retries: DEFAULT_RETRIES,
         retry_timeout: DEFAULT_RETRY_TIMEOUT_SECONDS,
+        post: nil,
         &blk
       )
         log = nil
@@ -103,6 +104,8 @@ module ::DiscourseAi
             provider_id: AiApiAuditLog::Provider::OpenAI,
             raw_request_payload: request_body,
             user_id: user_id,
+            post_id: post&.id,
+            topic_id: post&.topic_id,
           )
 
         if !blk
diff --git a/spec/lib/modules/ai_bot/bot_spec.rb b/spec/lib/modules/ai_bot/bot_spec.rb
index 3a2d654c..5d7ae734 100644
--- a/spec/lib/modules/ai_bot/bot_spec.rb
+++ b/spec/lib/modules/ai_bot/bot_spec.rb
@@ -23,7 +23,7 @@ class FakeBot < DiscourseAi::AiBot::Bot
     "#{role}: #{content}"
   end
 
-  def submit_prompt(prompt, prefer_low_cost: false)
+  def submit_prompt(prompt, post: nil, prefer_low_cost: false)
     rows = @responses.shift
     rows.each { |data| yield data, lambda {} }
   end
@@ -173,6 +173,8 @@ describe DiscourseAi::AiBot::Bot do
     expect(last.post_custom_prompt.custom_prompt).to eq(
       [[result, "search", "function"], ["I found nothing, sorry", bot_user.username]],
     )
+    log = AiApiAuditLog.find_by(post_id: second_post.id)
+    expect(log).to be_present
   end
 end
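With this patch applied, every audit row written by the OpenAI and Anthropic backends carries the post and topic that triggered the completion, so responsibility can be traced after the fact. A minimal sketch of how the new columns might be queried from a Rails console follows; it is illustrative only and not part of the patch, and "post" is assumed to be any Post record on the site:

    # Illustrative only: list recent completions attributed to a given post,
    # using the topic_id/post_id columns added by this migration.
    AiApiAuditLog
      .where(post_id: post.id)
      .order(id: :desc)
      .limit(10)
      .each do |log|
        puts "topic=#{log.topic_id} post=#{log.post_id} user=#{log.user_id} " \
          "tokens=#{log.request_tokens}/#{log.response_tokens}"
      end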