Fixing conflicts
caiosba committed Dec 6, 2024
2 parents d04bda3 + bd4c1f5 commit d0a2298
Showing 78 changed files with 2,749 additions and 429 deletions.
2 changes: 1 addition & 1 deletion .codeclimate.yml
@@ -2,7 +2,7 @@ version: "2"
checks:
argument-count:
config:
threshold: 8
threshold: 9
complex-logic:
config:
threshold: 4
6 changes: 1 addition & 5 deletions .github/workflows/ci-tests.yml
@@ -5,14 +5,10 @@ on:
- cron: '0 5 * * *' #Runs daily at 5 AM UTC
push:
branches:
- master
- develop
- epic*
- cv2*
- '*'
pull_request:
branches:
- develop


env:
CC_TEST_REPORTER_ID: "${{ secrets.CC_TEST_REPORTER_ID }}"
4 changes: 2 additions & 2 deletions .rubocop.yml
@@ -243,10 +243,10 @@ Metrics/ModuleLength:
Max: 250

Metrics/ParameterLists:
Description: 'Avoid parameter lists longer than three or four parameters.'
Description: 'Avoid parameter lists longer than 9 parameters.'
StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#too-many-params'
Enabled: true
Max: 8
Max: 9

Metrics/PerceivedComplexity:
Description: >-
4 changes: 1 addition & 3 deletions Gemfile.lock
@@ -726,8 +726,7 @@ GEM
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
retriable (3.1.2)
rexml (3.3.6)
strscan
rexml (3.3.9)
rotp (6.3.0)
rqrcode (2.1.1)
chunky_png (~> 1.0)
@@ -817,7 +816,6 @@ GEM
ssrf_filter (1.0.7)
streamio-ffmpeg (3.0.2)
multi_json (~> 1.8)
strscan (3.1.0)
swagger-docs (0.2.9)
activesupport (>= 3)
rails (>= 3)
15 changes: 14 additions & 1 deletion app/graph/mutations/graphql_crud_operations.rb
@@ -7,7 +7,20 @@ def self.safe_save(obj, attrs, parent_names = [])
obj.send(method, value) if obj.respond_to?(method)
end
obj.disable_es_callbacks = Rails.env.to_s == "test"
obj.save_with_version!

begin
obj.save_with_version!
rescue RuntimeError => e
if e.message.include?("\"code\":#{LapisConstants::ErrorCodes::const_get('DUPLICATED')}") &&
obj.is_a?(ProjectMedia) &&
obj.set_fact_check.present? &&
obj.set_original_claim.present?
existing_pm = ProjectMedia.find(JSON.parse(e.message)['data']['id'])
obj = ProjectMedia.handle_fact_check_for_existing_claim(existing_pm,obj)
else
raise e
end
end

name = obj.class_name.underscore
{ name.to_sym => obj }.merge(
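Note on the rescue added above: it assumes the DUPLICATED error raised while saving a ProjectMedia carries a JSON message that includes the id of the pre-existing item. A minimal sketch of the message shape this handler expects (illustrative only; aside from the "code" value and data["id"], the keys shown are assumptions, not taken from this commit):

require 'json'

duplicate_code = 5 # stands in for LapisConstants::ErrorCodes::const_get('DUPLICATED')
error_message = { error: 'Similar item already exists',
                  code: duplicate_code,
                  data: { id: 42 } }.to_json # 42 = id of the existing ProjectMedia

error_message.include?("\"code\":#{duplicate_code}") # => true, so the duplicate branch is taken
JSON.parse(error_message)['data']['id']              # => 42, used to load existing_pm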
40 changes: 40 additions & 0 deletions app/graph/types/team_statistics_type.rb
@@ -0,0 +1,40 @@
class TeamStatisticsType < DefaultObject
description 'Workspace statistics.'

implements GraphQL::Types::Relay::Node

# For articles

field :number_of_articles_created_by_date, JsonStringType, null: true
field :number_of_articles_updated_by_date, JsonStringType, null: true
field :number_of_explainers_created, GraphQL::Types::Int, null: true
field :number_of_fact_checks_created, GraphQL::Types::Int, null: true
field :number_of_published_fact_checks, GraphQL::Types::Int, null: true
field :number_of_fact_checks_by_rating, JsonStringType, null: true
field :top_articles_sent, JsonStringType, null: true
field :top_articles_tags, JsonStringType, null: true

# For tiplines

field :number_of_messages, GraphQL::Types::Int, null: true
field :number_of_conversations, GraphQL::Types::Int, null: true
field :number_of_messages_by_date, JsonStringType, null: true
field :number_of_conversations_by_date, JsonStringType, null: true
field :number_of_search_results_by_feedback_type, JsonStringType, null: true
field :average_response_time, GraphQL::Types::Int, null: true
field :number_of_unique_users, GraphQL::Types::Int, null: true
field :number_of_total_users, GraphQL::Types::Int, null: true
field :number_of_returning_users, GraphQL::Types::Int, null: true
field :number_of_subscribers, GraphQL::Types::Int, null: true
field :number_of_new_subscribers, GraphQL::Types::Int, null: true
field :number_of_newsletters_sent, GraphQL::Types::Int, null: true
field :number_of_newsletters_delivered, GraphQL::Types::Int, null: true
field :top_media_tags, JsonStringType, null: true
field :top_requested_media_clusters, JsonStringType, null: true
field :number_of_media_received_by_media_type, JsonStringType, null: true

# For both articles and tiplines

field :number_of_articles_sent, GraphQL::Types::Int, null: true
field :number_of_matched_results_by_article_type, JsonStringType, null: true
end
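The *_by_date, *_by_rating and top_* fields above use JsonStringType; assuming that scalar serializes to a JSON-encoded string (as the name suggests), a consumer would parse the value before using it. A minimal sketch with a made-up payload:

require 'json'

raw = '{"2024-11-01": 3, "2024-11-02": 7}' # hypothetical number_of_articles_created_by_date value
JSON.parse(raw).each { |date, count| puts "#{date}: #{count} articles" }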
12 changes: 12 additions & 0 deletions app/graph/types/team_type.rb
@@ -380,6 +380,7 @@ def api_key(dbid:)
end

field :api_keys, ApiKeyType.connection_type, null: true

def api_keys
ability = context[:ability] || Ability.new
api_keys = object.api_keys.order(created_at: :desc)
@@ -388,4 +389,15 @@ def api_keys
ability.can?(:read, api_key)
end
end

field :statistics, TeamStatisticsType, null: true do
argument :period, GraphQL::Types::String, required: true # FIXME: List/validate possible values
argument :language, GraphQL::Types::String, required: false
argument :platform, GraphQL::Types::String, required: false # FIXME: List/validate possible values
end

def statistics(period:, language: nil, platform: nil)
return nil unless User.current&.is_admin
TeamStatistics.new(object, period, language, platform)
end
end
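Taken together with TeamStatisticsType above, the new statistics field could be exercised with a query along these lines. This is a sketch only: the period and platform values are guesses (accepted values are still marked FIXME), and the root team(slug:) lookup is assumed to exist in this schema. Only admins get a non-nil result.

query = <<~GRAPHQL
  query {
    team(slug: "my-team") {
      statistics(period: "past_month", language: "en", platform: "whatsapp") {
        number_of_articles_sent
        number_of_published_fact_checks
        top_articles_sent
      }
    }
  }
GRAPHQL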
30 changes: 17 additions & 13 deletions app/lib/check_cached_fields.rb
@@ -47,7 +47,7 @@ def cached_field(name, options = {})
klass = self
update_on[:events].each do |event, callback|
model.send "after_#{event}", ->(obj) do
klass.update_cached_field(name, obj, update_on[:if], update_on[:affected_ids], callback, options)
klass.update_cached_field(name, obj, update_on[:if], update_on[:affected_ids], callback, options, event)
end
end
end
@@ -97,12 +97,13 @@ def index_cached_field(options, value, name, obj)
update_pg: options[:update_pg],
pg_field_name: options[:pg_field_name],
}
self.delay_for(1.second).index_cached_field_bg(index_options, value, name, obj)
self.delay_for(1.second).index_cached_field_bg(index_options, value, name, obj.class.name, obj.id)
end
end

def index_cached_field_bg(index_options, value, name, obj)
self.index_and_pg_cached_field(index_options, value, name, obj)
def index_cached_field_bg(index_options, value, name, klass, id)
obj = klass.constantize.find_by_id id
self.index_and_pg_cached_field(index_options, value, name, obj) unless obj.nil?
end

def update_pg_cache_field(options, value, name, target)
@@ -120,7 +121,7 @@ def create_cached_field(options, name, obj)
self.index_cached_field(options, value, name, obj) unless Rails.env == 'test'
end

def update_cached_field(name, obj, condition, ids, callback, options)
def update_cached_field(name, obj, condition, ids, callback, options, event)
return if self.skip_cached_field_update?
condition ||= proc { true }
return unless condition.call(obj)
@@ -136,17 +137,20 @@ def update_cached_field(name, obj, condition, ids, callback, options)
pg_field_name: options[:pg_field_name],
recalculate: options[:recalculate],
}
self.delay_for(1.second).update_cached_field_bg(name, obj, ids, callback, index_options)
self.delay_for(1.second).update_cached_field_bg(name, ids, callback, index_options, obj.class.name, obj.id, event)
end
end

def update_cached_field_bg(name, obj, ids, callback, options)
recalculate = options[:recalculate]
self.where(id: ids).each do |target|
value = callback == :recalculate ? target.send(recalculate) : obj.send(callback, target)
Rails.cache.write(self.check_cache_key(self, target.id, name), value, expires_in: self.cached_field_expiration(options))
# Update ES index and PG, if needed
self.index_and_pg_cached_field(options, value, name, target)
def update_cached_field_bg(name, ids, callback, options, klass, id, event)
obj = event == 'destroy' ? klass.constantize : klass.constantize.find_by_id(id)
unless obj.nil?
recalculate = options[:recalculate]
self.where(id: ids).each do |target|
value = callback == :recalculate ? target.send(recalculate) : obj.send(callback, target)
Rails.cache.write(self.check_cache_key(self, target.id, name), value, expires_in: self.cached_field_expiration(options))
# Update ES index and PG, if needed
self.index_and_pg_cached_field(options, value, name, target)
end
end
end
end
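The signature changes above follow one pattern: background jobs no longer receive a serialized ActiveRecord object, only its class name and id, and the job re-fetches the record, skipping the work if it has been deleted in the meantime (on destroy events the class itself stands in for the gone instance). A generic sketch of the pattern, written with ActiveJob purely for illustration; the app itself enqueues with delay_for, and the job and method names below are hypothetical:

class RecalculateCachedFieldJob < ApplicationJob
  def perform(klass_name, id)
    record = klass_name.constantize.find_by_id(id)
    return if record.nil? # record was deleted before the job ran: nothing to do
    record.touch # placeholder for the real recalculation work
  end
end

# Enqueueing: pass primitives, not the record itself.
RecalculateCachedFieldJob.perform_later('ProjectMedia', 1234)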
6 changes: 3 additions & 3 deletions app/lib/check_elastic_search.rb
@@ -63,7 +63,7 @@ def update_elasticsearch_doc_bg(options)
create_doc_if_not_exists(options)
sleep 1
client = $repository.client
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id], retry_on_conflict: 3, body: { doc: fields }
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id], body: { doc: fields }
end
end

@@ -98,7 +98,7 @@ def create_update_nested_obj_bg(options)
end
values = store_elasticsearch_data(options[:keys], options[:data])
client = $repository.client
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id], retry_on_conflict: 3,
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id],
body: { script: { source: source, params: { value: values, id: values['id'] } } }
end

@@ -178,7 +178,7 @@ def destroy_elasticsearch_doc_nested(options)
begin
client = $repository.client
source = "for (int i = 0; i < ctx._source.#{nested_type}.size(); i++) { if(ctx._source.#{nested_type}[i].id == params.id){ctx._source.#{nested_type}.remove(i);}}"
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id], retry_on_conflict: 3,
client.update index: CheckElasticSearchModel.get_index_alias, id: options[:doc_id],
body: { script: { source: source, params: { id: options[:model_id] } } }
rescue
Rails.logger.info "[ES destroy] doc with id #{options[:doc_id]} not exists"
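All three hunks above drop retry_on_conflict: 3 from the partial/scripted update calls, so a concurrent write that bumps the document version will now surface a version-conflict error instead of being retried by Elasticsearch. For reference, the call shape with placeholder index name, id and field (the app uses CheckElasticSearchModel.get_index_alias):

client = $repository.client
# Previously: client.update index: 'check_items', id: 'abc123', retry_on_conflict: 3, body: { doc: { title: 'x' } }
client.update index: 'check_items', id: 'abc123', body: { doc: { title: 'x' } } # current form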
18 changes: 6 additions & 12 deletions app/lib/smooch_nlu.rb
@@ -33,8 +33,6 @@ def enabled?
end

def update_keywords(language, keywords, keyword, operation, doc_id, context)
alegre_operation = nil
alegre_params = nil
common_alegre_params = {
doc_id: doc_id,
context: {
@@ -44,15 +42,11 @@ def update_keywords(language, keywords, keyword, operation, doc_id, context)
}
if operation == 'add' && !keywords.include?(keyword)
keywords << keyword
alegre_operation = 'post'
alegre_params = common_alegre_params.merge({ text: keyword, models: ALEGRE_MODELS_AND_THRESHOLDS.keys })
Bot::Alegre.index_sync_with_params(common_alegre_params.merge({ text: keyword, models: ALEGRE_MODELS_AND_THRESHOLDS.keys }), "text")
elsif operation == 'remove'
keywords -= [keyword]
alegre_operation = 'delete'
alegre_params = common_alegre_params.merge({ quiet: true })
Bot::Alegre.request_delete_from_raw(common_alegre_params.merge({ quiet: true }), "text")
end
# FIXME: Add error handling and better logging
Bot::Alegre.request(alegre_operation, '/text/similarity/', alegre_params) if alegre_operation && alegre_params
keywords
end

@@ -91,19 +85,19 @@ def self.alegre_matches_from_message(message, language, context, alegre_result_k
language: language,
}.merge(context)
}
response = Bot::Alegre.request('post', '/text/similarity/search/', params)
response = Bot::Alegre.query_sync_with_params(params, "text")

# One approach would be to take the option that has the most matches
# Unfortunately this approach is influenced by the number of keywords per option
# So, we are not using this approach right now
# Get the `alegre_result_key` of all results returned
# option_counts = response['result'].to_a.map{|o| o.dig('_source', 'context', alegre_result_key)}
# option_counts = response['result'].to_a.map{|o| o.dig('context', alegre_result_key)}
# Count how many of each alegre_result_key we have and sort (high to low)
# ranked_options = option_counts.group_by(&:itself).transform_values(&:count).sort_by{|_k,v| v}.reverse()

# Second approach is to sort the results from best to worst
sorted_options = response['result'].to_a.sort_by{ |result| result['_score'] }.reverse
ranked_options = sorted_options.map{ |o| { 'key' => o.dig('_source', 'context', alegre_result_key), 'score' => o['_score'] } }
sorted_options = response['result'].to_a.sort_by{ |result| result['score'] }.reverse
ranked_options = sorted_options.map{ |o| { 'key' => o.dig('context', alegre_result_key), 'score' => o['score'] } }
matches = ranked_options

# In all cases log for analysis
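Keyword indexing/removal and the similarity search now go through dedicated Alegre helpers instead of raw Bot::Alegre.request calls, and each result is read as a flat hash with score and context rather than Elasticsearch-style _score/_source. A sketch of the result shape this implies, with made-up values and 'menu_option_id' standing in for alegre_result_key:

# Assumed shape of response['result'] returned by query_sync_with_params.
response = {
  'result' => [
    { 'score' => 0.64, 'context' => { 'menu_option_id' => 'unsubscribe' } },
    { 'score' => 0.91, 'context' => { 'menu_option_id' => 'subscribe' } }
  ]
}

sorted = response['result'].sort_by { |r| r['score'] }.reverse
sorted.map { |r| { 'key' => r.dig('context', 'menu_option_id'), 'score' => r['score'] } }
# => [{"key"=>"subscribe", "score"=>0.91}, {"key"=>"unsubscribe", "score"=>0.64}]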
17 changes: 9 additions & 8 deletions app/models/annotations/tag.rb
@@ -93,14 +93,15 @@ def hit_nested_objects_limit?
end

def self.create_project_media_tags(project_media_id, tags_json)
project_media = ProjectMedia.find_by_id(project_media_id)

if !project_media.nil?
tags = JSON.parse(tags_json)
clean_tags(tags).each { |tag| Tag.create annotated: project_media, tag: tag.strip, skip_check_ability: true }
else
error = StandardError.new("[ProjectMedia] Exception creating project media's tags in background. Project media is nil.")
CheckSentry.notify(error, project_media_id: project_media_id)
tags = JSON.parse(tags_json).reject { |t| t.blank? }
unless tags.empty?
project_media = ProjectMedia.find_by_id(project_media_id)
if !project_media.nil?
clean_tags(tags).each { |tag| Tag.create annotated: project_media, tag: tag.strip, skip_check_ability: true }
else
error = StandardError.new("[ProjectMedia] Exception creating project media's tags in background. Project media is nil.")
CheckSentry.notify(error, project_media_id: project_media_id)
end
end
end

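With the reordering above, blank entries are rejected before the ProjectMedia lookup, so an all-blank payload no longer touches the database or reports to Sentry. An illustrative call with made-up values (assuming clean_tags keeps both non-blank entries):

Tag.create_project_media_tags(1234, ['elections', '', '   ', 'health'].to_json)
# => Tag annotations created only for "elections" and "health"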
14 changes: 4 additions & 10 deletions app/models/bot/alegre.rb
@@ -41,7 +41,7 @@ def similar_items_ids_and_scores(team_ids, thresholds = {})
ALL_TEXT_SIMILARITY_FIELDS.each do |field|
text = self.send(field)
next if text.blank?
threads << Thread.new { ids_and_scores.merge!(Bot::Alegre.get_similar_texts(team_ids, text, Bot::Alegre::ALL_TEXT_SIMILARITY_FIELDS, thresholds[:text]).to_h) }
threads << Thread.new { ids_and_scores.merge!(Bot::Alegre.get_items_from_similar_text(team_ids, text, Bot::Alegre::ALL_TEXT_SIMILARITY_FIELDS, thresholds[:text]).to_h) }
end
threads.map(&:join)
end
@@ -155,10 +155,8 @@ def self.run(body)
if ['audio', 'image', 'video'].include?(self.get_pm_type(pm))
self.relate_project_media_async(pm)
else
Bot::Alegre.send_to_media_similarity_index(pm)
Bot::Alegre.send_field_to_similarity_index(pm, 'original_title')
Bot::Alegre.send_field_to_similarity_index(pm, 'original_description')
Bot::Alegre.relate_project_media_to_similar_items(pm)
self.relate_project_media_async(pm, 'original_title')
self.relate_project_media_async(pm, 'original_description')
end
self.get_extracted_text(pm)
self.get_flags(pm)
@@ -206,7 +204,7 @@ def self.get_items_from_similar_text(team_id, text, fields = nil, threshold = ni
threshold ||= self.get_threshold_for_query('text', nil, true)
models ||= [self.matching_model_to_use(team_ids)].flatten
Hash[self.get_similar_items_from_api(
'/text/similarity/search/',
'text',
self.similar_texts_from_api_conditions(text, models, fuzzy, team_ids, fields, threshold),
threshold
).collect{|k,v| [k, v.merge(model: v[:model]||Bot::Alegre.default_matching_model)]}]
@@ -722,8 +720,4 @@ def self.is_text_too_short?(pm, length_threshold)
is_short
end

class <<self
alias_method :get_similar_texts, :get_items_from_similar_text
end

end
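The get_similar_texts alias is removed and its caller now uses the canonical get_items_from_similar_text, while get_similar_items_from_api takes a media type ('text') instead of a raw endpoint path. An illustrative call with placeholder arguments; the return shape is approximate:

Bot::Alegre.get_items_from_similar_text([1], 'Some tipline claim text')
# => { project_media_id => { score: ..., model: ..., context: ... }, ... } (approximate)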
4 changes: 2 additions & 2 deletions app/models/bot/smooch.rb
@@ -565,7 +565,7 @@ def self.process_menu_option_value(value, option, message, language, workflow, a
def self.is_a_shortcut_for_submission?(state, message)
self.is_v2? && (state == 'main' || state == 'waiting_for_message') && (
!message['mediaUrl'].blank? ||
::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer) ||
::Bot::Alegre.get_number_of_words(message['text'].to_s) > self.min_number_of_words_for_tipline_long_text ||
!Twitter::TwitterText::Extractor.extract_urls(message['text'].to_s).blank? # URL in message?
)
end
@@ -851,7 +851,7 @@ def self.save_text_message(message)
extra = { quote: claim }
pm = ProjectMedia.joins(:media).where('trim(lower(quote)) = ?', claim.downcase).where('project_medias.team_id' => team.id).last
# Don't create a new text media if it's an unconfirmed request with just a few words
if pm.nil? && message['archived'] == CheckArchivedFlags::FlagCodes::UNCONFIRMED && ::Bot::Alegre.get_number_of_words(claim) < CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer)
if pm.nil? && message['archived'] == CheckArchivedFlags::FlagCodes::UNCONFIRMED && ::Bot::Alegre.get_number_of_words(claim) < self.min_number_of_words_for_tipline_long_text
return team
end
else
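Both call sites now read the word-count threshold from self.min_number_of_words_for_tipline_long_text rather than fetching the setting inline. The helper's definition is not part of this hunk; if it simply centralizes the old lookup, it would be roughly:

# Hypothetical definition, shown only to document the behavior being centralized.
def self.min_number_of_words_for_tipline_long_text
  CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer)
end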
9 changes: 5 additions & 4 deletions app/models/claim_description.rb
@@ -16,7 +16,7 @@ class ClaimDescription < ApplicationRecord
validates_uniqueness_of :project_media_id, allow_nil: true
validate :cant_apply_article_to_item_if_article_is_in_the_trash
after_commit :update_fact_check, on: [:update]
after_update :update_report_status
after_update :update_report
after_update :reset_item_rating_if_removed
after_update :replace_media, unless: proc { |cd| cd.disable_replace_media }
after_update :migrate_claim_and_fact_check_logs, if: proc { |cd| cd.saved_change_to_project_media_id? && !cd.project_media_id.nil? }
@@ -69,16 +69,17 @@ def update_fact_check
end
end

# Pause report when claim/fact-check is removed
def update_report_status
# Pause and update report when claim/fact-check is removed
def update_report
if self.project_media_id.nil? && !self.project_media_id_before_last_save.nil?
# Update report status
# Update report status and text fields
pm = ProjectMedia.find(self.project_media_id_before_last_save)
report = Annotation.where(annotation_type: 'report_design', annotated_type: 'ProjectMedia', annotated_id: pm.id).last
unless report.nil?
report = report.load
data = report.data.clone.with_indifferent_access
data[:state] = 'paused'
data[:options] = data[:options].to_h.merge({ description: '', headline: '', title: '', text: '' })
report.data = data
report.save!
end
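Beyond pausing, the renamed callback now blanks the report's user-facing text when the claim/fact-check is detached from the item. Roughly, report.data changes like this (values are illustrative; any other option keys are preserved by the merge):

before = { 'state' => 'published',
           'options' => { 'title' => 'False', 'headline' => 'False', 'text' => 'Debunk body', 'description' => 'Summary' } }

after  = { 'state' => 'paused',
           'options' => { 'title' => '', 'headline' => '', 'text' => '', 'description' => '' } }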