From dd4e305ff7c9448cb9cbff22945cefc49f088593 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lo=C3=AFc=20Guitaut?=
Date: Tue, 28 May 2024 11:15:42 +0200
Subject: [PATCH] DEV: Update rubocop-discourse to version 3.8.0 (#641)

---
 Gemfile.lock                                  | 40 ++++++++++++-------
 app/jobs/regular/generate_embeddings.rb       |  2 +-
 app/models/model_accuracy.rb                  |  4 +-
 ...rate_embeddings_from_dedicated_database.rb |  2 +-
 lib/ai_helper/entry_point.rb                  |  4 +-
 lib/embeddings/semantic_search.rb             |  2 +-
 lib/embeddings/strategies/truncation.rb       |  4 +-
 package.json                                  |  9 ++---
 8 files changed, 38 insertions(+), 29 deletions(-)

diff --git a/Gemfile.lock b/Gemfile.lock
index 571925e1..27fcd637 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,7 +1,7 @@
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (7.1.3.2)
+    activesupport (7.1.3.3)
       base64
       bigdecimal
       concurrent-ruby (~> 1.0, >= 1.0.2)
@@ -13,27 +13,28 @@ GEM
       tzinfo (~> 2.0)
     ast (2.4.2)
     base64 (0.2.0)
-    bigdecimal (3.1.6)
+    bigdecimal (3.1.8)
     concurrent-ruby (1.2.3)
     connection_pool (2.4.1)
     drb (2.2.1)
-    i18n (1.14.1)
+    i18n (1.14.5)
       concurrent-ruby (~> 1.0)
-    json (2.7.1)
+    json (2.7.2)
     language_server-protocol (3.17.0.3)
-    minitest (5.22.2)
+    minitest (5.23.1)
     mutex_m (0.2.0)
     parallel (1.24.0)
-    parser (3.3.0.5)
+    parser (3.3.1.0)
       ast (~> 2.4.1)
       racc
     prettier_print (1.2.1)
-    racc (1.7.3)
+    racc (1.8.0)
+    rack (3.0.11)
     rainbow (3.1.1)
-    regexp_parser (2.9.0)
+    regexp_parser (2.9.2)
     rexml (3.2.8)
       strscan (>= 3.0.9)
-    rubocop (1.61.0)
+    rubocop (1.64.0)
       json (~> 2.3)
       language_server-protocol (>= 3.17.0)
       parallel (~> 1.10)
@@ -41,25 +42,34 @@ GEM
       rainbow (>= 2.2.2, < 4.0)
       regexp_parser (>= 1.8, < 3.0)
       rexml (>= 3.2.5, < 4.0)
-      rubocop-ast (>= 1.30.0, < 2.0)
+      rubocop-ast (>= 1.31.1, < 2.0)
       ruby-progressbar (~> 1.7)
       unicode-display_width (>= 2.4.0, < 3.0)
-    rubocop-ast (1.31.1)
-      parser (>= 3.3.0.4)
+    rubocop-ast (1.31.3)
+      parser (>= 3.3.1.0)
     rubocop-capybara (2.20.0)
       rubocop (~> 1.41)
-    rubocop-discourse (3.7.1)
+    rubocop-discourse (3.8.0)
       activesupport (>= 6.1)
       rubocop (>= 1.59.0)
       rubocop-capybara (>= 2.0.0)
       rubocop-factory_bot (>= 2.0.0)
+      rubocop-rails (>= 2.25.0)
       rubocop-rspec (>= 2.25.0)
     rubocop-factory_bot (2.25.1)
       rubocop (~> 1.41)
-    rubocop-rspec (2.27.1)
+    rubocop-rails (2.25.0)
+      activesupport (>= 4.2.0)
+      rack (>= 1.1)
+      rubocop (>= 1.33.0, < 2.0)
+      rubocop-ast (>= 1.31.1, < 2.0)
+    rubocop-rspec (2.29.2)
       rubocop (~> 1.40)
       rubocop-capybara (~> 2.17)
       rubocop-factory_bot (~> 2.22)
+      rubocop-rspec_rails (~> 2.28)
+    rubocop-rspec_rails (2.28.3)
+      rubocop (~> 1.40)
     ruby-progressbar (1.13.0)
     strscan (3.1.0)
     syntax_tree (6.2.0)
@@ -76,4 +86,4 @@ DEPENDENCIES
   syntax_tree

 BUNDLED WITH
-   2.5.4
+   2.5.10
diff --git a/app/jobs/regular/generate_embeddings.rb b/app/jobs/regular/generate_embeddings.rb
index d80b85e5..70961a2d 100644
--- a/app/jobs/regular/generate_embeddings.rb
+++ b/app/jobs/regular/generate_embeddings.rb
@@ -12,7 +12,7 @@ module Jobs
       topic = target.is_a?(Topic) ? target : target.topic
       post = target.is_a?(Post) ? target : target.first_post
-      return unless topic.present? && post.present?
+      return if topic.blank? || post.blank?
       return if topic.private_message? && !SiteSetting.ai_embeddings_generate_for_pms
       return if post.raw.blank?
diff --git a/app/models/model_accuracy.rb b/app/models/model_accuracy.rb
index 933653a7..07d4b65b 100644
--- a/app/models/model_accuracy.rb
+++ b/app/models/model_accuracy.rb
@@ -2,8 +2,8 @@
 class ModelAccuracy < ActiveRecord::Base
   def self.adjust_model_accuracy(new_status, reviewable)
-    return unless %i[approved rejected].include?(new_status)
-    return unless [ReviewableAiPost, ReviewableAiChatMessage].include?(reviewable.class)
+    return if %i[approved rejected].exclude?(new_status)
+    return if [ReviewableAiPost, ReviewableAiChatMessage].exclude?(reviewable.class)

     verdicts = reviewable.payload.to_h["verdicts"] || {}
diff --git a/db/migrate/20230710171143_migrate_embeddings_from_dedicated_database.rb b/db/migrate/20230710171143_migrate_embeddings_from_dedicated_database.rb
index 5bac1c0c..aef3b2be 100644
--- a/db/migrate/20230710171143_migrate_embeddings_from_dedicated_database.rb
+++ b/db/migrate/20230710171143_migrate_embeddings_from_dedicated_database.rb
@@ -3,7 +3,7 @@
 class MigrateEmbeddingsFromDedicatedDatabase < ActiveRecord::Migration[7.0]
   def up
     return unless SiteSetting.ai_embeddings_enabled
-    return unless SiteSetting.ai_embeddings_pg_connection_string.present?
+    return if SiteSetting.ai_embeddings_pg_connection_string.blank?

     truncation = DiscourseAi::Embeddings::Strategies::Truncation.new
diff --git a/lib/ai_helper/entry_point.rb b/lib/ai_helper/entry_point.rb
index 0032f098..3e0e1eec 100644
--- a/lib/ai_helper/entry_point.rb
+++ b/lib/ai_helper/entry_point.rb
@@ -13,8 +13,8 @@ module DiscourseAi
         plugin.on(:chat_message_created) do |message, channel, user, extra|
           next unless SiteSetting.composer_ai_helper_enabled
           next unless SiteSetting.ai_helper_automatic_chat_thread_title
-          next unless extra[:thread].present?
-          next unless extra[:thread].title.blank?
+          next if extra[:thread].blank?
+          next if extra[:thread].title.present?

           reply_count = extra[:thread].replies.count
diff --git a/lib/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb
index 824f280c..967edf95 100644
--- a/lib/embeddings/semantic_search.rb
+++ b/lib/embeddings/semantic_search.rb
@@ -171,7 +171,7 @@ module DiscourseAi
             SiteSetting.ai_embeddings_semantic_search_hyde_model,
           ).generate(prompt, user: @guardian.user, feature_name: "semantic_search_hyde")

-        Nokogiri::HTML5.fragment(llm_response).at("ai")&.text&.presence || llm_response
+        Nokogiri::HTML5.fragment(llm_response).at("ai")&.text.presence || llm_response
       end

       private
diff --git a/lib/embeddings/strategies/truncation.rb b/lib/embeddings/strategies/truncation.rb
index ced670ab..1ae82d80 100644
--- a/lib/embeddings/strategies/truncation.rb
+++ b/lib/embeddings/strategies/truncation.rb
@@ -49,7 +49,7 @@ module DiscourseAi
         def topic_truncation(topic, tokenizer, max_length)
           text = +topic_information(topic)

-          if topic&.topic_embed&.embed_content_cache&.present?
+          if topic&.topic_embed&.embed_content_cache.present?
             text << Nokogiri::HTML5.fragment(topic.topic_embed.embed_content_cache).text
             text << "\n\n"
           end
@@ -66,7 +66,7 @@ module DiscourseAi
         def post_truncation(post, tokenizer, max_length)
           text = +topic_information(post.topic)

-          if post.is_first_post? && post.topic&.topic_embed&.embed_content_cache&.present?
+          if post.is_first_post? && post.topic&.topic_embed&.embed_content_cache.present?
             text << Nokogiri::HTML5.fragment(post.topic.topic_embed.embed_content_cache).text
           else
             text << Nokogiri::HTML5.fragment(post.cooked).text
diff --git a/package.json b/package.json
index dc4878cc..b032ece1 100644
--- a/package.json
+++ b/package.json
@@ -1,10 +1,9 @@
 {
-  "name": "discourse-ai",
   "private": true,
   "devDependencies": {
-    "@discourse/lint-configs": "^1.3.4",
-    "ember-template-lint": "^5.13.0",
-    "eslint": "^8.56.0",
-    "prettier": "^2.8.8"
+    "@discourse/lint-configs": "1.3.9",
+    "ember-template-lint": "6.0.0",
+    "eslint": "8.57.0",
+    "prettier": "2.8.8"
   }
 }
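
Reviewer note: most of the Ruby hunks above are mechanical guard-clause rewrites: `return unless x.present?` becomes `return if x.blank?`, and negated `include?` checks become `exclude?`. They follow from rubocop-discourse 3.8.0 newly depending on rubocop-rails, as the Gemfile.lock hunk shows; the Rails-flavoured cops it brings in (likely Rails/Blank and Rails/NegateInclude) prefer these forms. The rewrites are behaviour-preserving because ActiveSupport defines blank?/present? on every object, including nil, and exclude? as the exact negation of include?. The sketch below is standalone illustration, not plugin code: the variable values are made up and only the activesupport gem is assumed.

# frozen_string_literal: true
# Illustrative sketch: why the rewritten guards are equivalent to the old ones.
require "active_support/core_ext/object/blank" # blank?, present?
require "active_support/core_ext/enumerable"   # exclude?

topic = nil               # stands in for a lookup that found nothing
post = "some raw content" # stands in for a record that is present

# Old guard: `return unless topic.present? && post.present?`
old_guard_fires = !(topic.present? && post.present?)
# New guard: `return if topic.blank? || post.blank?`
new_guard_fires = topic.blank? || post.blank?
puts old_guard_fires == new_guard_fires # => true (De Morgan: !(a && b) == !a || !b)

# Old: `return unless %i[approved rejected].include?(new_status)`
# New: `return if %i[approved rejected].exclude?(new_status)`
new_status = :ignored
statuses = %i[approved rejected]
puts statuses.exclude?(new_status) == !statuses.include?(new_status) # => true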
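
Reviewer note: the other recurring change drops the safe-navigation operator immediately before `.present?` or `.presence` (in lib/embeddings/semantic_search.rb and lib/embeddings/strategies/truncation.rb). That `&.` is redundant: ActiveSupport defines present?, blank? and presence on nil as well, so calling them on a nil receiver is safe and yields an equivalently falsy result. A minimal standalone sketch, with made-up variable names and only activesupport assumed:

# frozen_string_literal: true
# Illustrative sketch: `x&.present?` returns nil for a nil receiver while
# `x.present?` returns false; both are falsy. `nil.presence` is simply nil.
require "active_support/core_ext/object/blank"

cache = nil # stands in for topic&.topic_embed&.embed_content_cache

with_safe_nav = cache&.present?   # => nil   (falsy)
without_safe_nav = cache.present? # => false (falsy)
puts !!with_safe_nav == !!without_safe_nav # => true, so an `if` guard behaves the same

extracted = nil           # stands in for ...at("ai")&.text when no <ai> tag is found
llm_response = "fallback"
same_result = (extracted&.presence || llm_response) == (extracted.presence || llm_response)
puts same_result # => true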