FIX: refine image and read command (#131)
- Attempt to hint reading is done by sending complete: true (see the sketch below)
- Do not include post_number in the result unless it was sent in
- Rush visual feedback when a command is run (ensure we always revise)
- Include hyperlink in read command description
- Stop round tripping to GPT after image generation (speeds up images by a lot)
- Add a test for image command
commit 7eedbf29e0 (parent 958dfc360e)
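The first two items change the shape of the hash the read command returns. As a rough, standalone illustration (plain Ruby; read_result is a hypothetical helper used only here, not part of the plugin), the result now always carries complete: true as a hint that reading is finished, and post_number is only echoed back when the caller supplied one:

    # Hypothetical helper sketching the result shape only; the real logic
    # lives in ReadCommand#process in the diff below.
    def read_result(topic_id:, content:, post_number: nil)
      result = { topic_id: topic_id, content: content, complete: true }
      result[:post_number] = post_number if post_number
      result
    end

    read_result(topic_id: 1, content: "title: Hello world")
    # => { topic_id: 1, content: "title: Hello world", complete: true }

    read_result(topic_id: 1, content: "title: Hello world", post_number: 2)
    # => { topic_id: 1, content: "title: Hello world", complete: true, post_number: 2 }
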
@@ -102,7 +102,7 @@ en:
         google: "Search Google"
         read: "Read topic"
       command_description:
-        read: "Reading: %{title}"
+        read: "Reading: <a href='%{url}'>%{title}</a>"
         time: "Time in %{timezone} is %{time}"
         summarize: "Summarized <a href='%{url}'>%{title}</a>"
         image: "%{prompt}"

@@ -125,11 +125,13 @@ module DiscourseAi
         raw = post.raw.sub(placeholder, raw)

+        post.revise(bot_user, { raw: raw }, skip_validations: true, skip_revision: true)
+
         if chain_next_response
+          # somewhat annoying but whitespace was stripped in revise
+          # so we need to save again
           post.raw = raw
           post.save!(validate: false)
-        else
-          post.revise(bot_user, { raw: raw }, skip_validations: true, skip_revision: true)
         end

         [chain_next_response, post]

@@ -15,20 +15,13 @@ module DiscourseAi::AiBot::Commands
         [
           Parameter.new(
             name: "prompt",
-            description: "The prompt used to generate or create or draw the image",
+            description:
+              "The prompt used to generate or create or draw the image (40 words or less, be creative)",
             type: "string",
             required: true,
           ),
         ]
       end
-
-      def custom_system_message
-        <<~TEXT
-          In Discourse the markdown (description|SIZE, ZOOM%)[upload://SOMETEXT] is used to denote images and uploads. NEVER try changing the to http or https links.
-          ALWAYS prefer the upload:// format if available.
-          When rendering multiple images place them in a [grid] ... [/grid] block
-        TEXT
-      end
     end

     def result_name

@@ -40,7 +33,11 @@ module DiscourseAi::AiBot::Commands
     end

     def chain_next_response
-      true
+      false
     end

+    def custom_raw
+      @custom_raw
+    end
+
     def process(prompt:)

@@ -58,7 +55,8 @@ module DiscourseAi::AiBot::Commands
         f.unlink
       end

-      raw = <<~RAW
+      @custom_raw = <<~RAW
+
         [grid]
         #{
         uploads

@@ -68,7 +66,7 @@ module DiscourseAi::AiBot::Commands
         [/grid]
       RAW

-      { prompt: prompt, markdown: raw, display_to_user: true }
+      { prompt: prompt, displayed_to_user: true }
     end
   end
 end

@@ -30,7 +30,7 @@ module DiscourseAi::AiBot::Commands
     end

     def description_args
-      { title: @title }
+      { title: @title, url: @url }
     end

     def process(topic_id:, post_number: nil)

@@ -46,6 +46,8 @@ module DiscourseAi::AiBot::Commands
       @title = topic.title

       posts = Post.secured(Guardian.new).where(topic_id: topic_id).order(:post_number).limit(40)
+      @url = topic.relative_url(post_number)
+
       posts = posts.where("post_number = ?", post_number) if post_number

       content = +"title: #{topic.title}\n\n"

@@ -55,7 +57,9 @@ module DiscourseAi::AiBot::Commands
       # TODO: 16k or 100k models can handle a lot more tokens
       content = ::DiscourseAi::Tokenizer::BertTokenizer.truncate(content, 1500).squish

-      { topic_id: topic_id, post_number: post_number, content: content }
+      result = { topic_id: topic_id, content: content, complete: true }
+      result[:post_number] = post_number if post_number
+      result
     end
   end
 end

@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require_relative "../../../../support/openai_completions_inference_stubs"
-
 RSpec.describe DiscourseAi::AiBot::Commands::GoogleCommand do
   fab!(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }

@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+RSpec.describe DiscourseAi::AiBot::Commands::ImageCommand do
+  fab!(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT3_5_TURBO_ID) }
+
+  describe "#process" do
+    it "can generate correct info" do
+      post = Fabricate(:post)
+
+      SiteSetting.ai_stability_api_url = "https://api.stability.dev"
+      SiteSetting.ai_stability_api_key = "abc"
+
+      image =
+        "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg=="
+
+      stub_request(
+        :post,
+        "https://api.stability.dev/v1/generation/#{SiteSetting.ai_stability_engine}/text-to-image",
+      )
+        .with do |request|
+          json = JSON.parse(request.body)
+          expect(json["text_prompts"][0]["text"]).to eq("a pink cow")
+          true
+        end
+        .to_return(status: 200, body: { artifacts: [{ base64: image }, { base64: image }] }.to_json)
+
+      image = described_class.new(bot_user, post)
+      info = image.process(prompt: "a pink cow").to_json
+
+      expect(JSON.parse(info)).to eq("prompt" => "a pink cow", "displayed_to_user" => true)
+      expect(image.custom_raw).to include("upload://")
+      expect(image.custom_raw).to include("[grid]")
+      expect(image.custom_raw).to include("a pink cow")
+    end
+  end
+end

@@ -15,6 +15,7 @@ RSpec.describe DiscourseAi::AiBot::Commands::ReadCommand do
       expect(results[:topic_id]).to eq(post1.topic_id)
       expect(results[:content]).to include("hello")
       expect(results[:content]).to include("sam")
+      expect(read.description_args).to eq(title: post1.topic.title, url: post1.topic.relative_url)
     end
   end
 end