Skip to content

Commit

Permalink
fix logger error and linting
Browse files · Browse the repository at this point in the history
  • Loading branch information
kjgarza committed Jan 14, 2020
1 parent c7629ec commit 06b0f8d
Show file tree
Hide file tree
Showing 4 changed files with 59 additions and 57 deletions.
4 changes: 2 additions & 2 deletions app/jobs/report_import_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ class ReportImportJob < ActiveJob::Base
def perform(item, options={})
response = UsageUpdate.get_data(item, options)
if response.status != 200
Rails.logger.info "[Usage Report Parsing] Report #{item} not found"
return {}
Rails.logger.error "[Usage Report Parsing] Report #{item} not found"
{}
else
# report = Report.new(response, options)
Rails.logger.info "[Usage Report] Started to parse #{item}."
Expand Down
4 changes: 2 additions & 2 deletions app/jobs/usage_update_export_job.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ def perform(item, options={})
Rails.logger.info "[Event Data] #{item['subj-id']} #{item['relation-type-id']} #{item['obj-id']} pushed to Event Data service for update."
elsif response.body["errors"].present?
Rails.logger.error "[Event Data] #{item['subj-id']} #{item['relation-type-id']} #{item['obj-id']} had an error: #{response.body['errors'].first['title']}"
Rails.error item.inspect
Rails.logger.error item.inspect
end
end
end
end
33 changes: 16 additions & 17 deletions app/jobs/usage_update_parse_job.rb
Original file line number Diff line number Diff line change
@@ -1,40 +1,39 @@
class UsageUpdateParseJob < ActiveJob::Base
queue_as :levriero

ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png"
ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png".freeze

def perform(dataset, options)
# response = UsageUpdate.get_data(report_url, options)
# report = Report.new(report_header, options)
data = Report.translate_datasets dataset, options
# data = Report.new(response, options).parse_data
send_message(data,options[:url],{slack_webhook_url: ENV['SLACK_WEBHOOK_URL']})
send_message(data, options[:url], slack_webhook_url: ENV["SLACK_WEBHOOK_URL"])
options.merge(
report_meta: {
report_id: options[:header].dig("report-id"),
report_id: options[:header].dig("report-id"),
created_by: options[:header].dig("created-by"),
reporting_period: options[:header].dig("reporting-period"),
})
},
)

UsageUpdate.push_datasets(data, options) unless Rails.env.test?
end

def send_message data, item, options={}
errors = data.select {|hsh| hsh.fetch("errors",nil) }
def send_message(data, item, _options = {})
errors = data.select { |hsh| hsh.fetch("errors", nil) }
if data.length.zero?
options[:level] = "warning"
text = "[Usage Report Parsing] Error parsing Report #{item}. Report is empty"
# options[:level] = "warning"
Rails.logger.error "[Usage Report Parsing] Error parsing Report #{item}. Report is empty"
elsif !errors.empty?
options[:level] = "warning"
text = "[Usage Report Parsing] #{errors.length} Errors in report #{item}. #{errors}"
elsif data.respond_to?("each").nil?
options[:level] = "danger"
text = "[Usage Report Parsing] Something went wrong with #{item}."
# options[:level] = "warning"
Rails.logger.error "[Usage Report Parsing] #{errors.length} Errors in report #{item}. #{errors}"
elsif data.respond_to?("each").nil?
# options[:level] = "danger"
Rails.logger.fatal "[Usage Report Parsing] Something went wrong with #{item}."
else
options[:level] = "good"
text = "[Usage Report Parsing] Successfully parsed Report #{item} with #{data.length} instances"
# options[:level] = "good"
Rails.logger.info "[Usage Report Parsing] Successfully parsed Report #{item} with #{data.length} instances"
end

Rails.logger.info text
end
end
75 changes: 39 additions & 36 deletions app/models/usage_update.rb
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
require 'digest'
require "digest"

class UsageUpdate < Base
LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze

USAGE_RELATIONS = [
"total-dataset-investigations-regular",
Expand All @@ -11,34 +11,35 @@ class UsageUpdate < Base
"unique-dataset-investigations-regular",
"unique-dataset-investigations-machine",
"unique-dataset-requests-machine",
"unique-dataset-requests-regular"
]
"unique-dataset-requests-regular",
].freeze

RESOLUTION_RELATIONS = [
"total-resolutions-regular",
"total-resolutions-machine",
"unique-resolutions-machine",
"unique-resolutions-regular"
]
"unique-resolutions-regular",
].freeze

def self.import(_options={})
def self.import(_options = {})
usage_update = UsageUpdate.new
usage_update.queue_jobs
usage_update.queue_jobs
end

def self.redirect(response, options={})
def self.redirect(response, options = {})
report = Report.new(response, options)
text = "[Usage Report] Started to parse #{report.report_url}."
Rails.logger.info text
# args = {header: report.header, url: report.report_url}
case report.get_type
when "normal" then Report.parse_normal_report(report)
when "compressed" then Report.parse_multi_subset_report(report)
when "normal" then Report.parse_normal_report(report)
when "compressed" then Report.parse_multi_subset_report(report)
end
end

def self.get_data(report_url, _options={})
return OpenStruct.new(body: { "errors" => "No Report given"}) if report_url.blank?
def self.get_data(report_url, _options = {})
return OpenStruct.new(body: { "errors" => "No Report given" }) if report_url.blank?

host = URI.parse(report_url).host.downcase
report = Maremma.get(report_url, timeout: 120, host: host)
report
Expand All @@ -53,72 +54,71 @@ def sqs
Aws::SQS::Client.new(region: ENV["AWS_REGION"])
end

def self.format_event(type, data, options={})
def self.format_event(type, data, _options = {})
# TODO: error class for fail and proper error handling
fail "No type given. Report #{data[:report_url]} not processed" if type.blank?
fail "Report_id is missing" if data[:report_url].blank?

if USAGE_RELATIONS.include?(type.downcase)
source_id = "datacite-usage"
source_token = ENV['DATACITE_USAGE_SOURCE_TOKEN']
source_token = ENV["DATACITE_USAGE_SOURCE_TOKEN"]
elsif RESOLUTION_RELATIONS.include?(type.downcase)
source_id = "datacite-resolution"
source_token = ENV['DATACITE_RESOLUTION_SOURCE_TOKEN']
source_token = ENV["DATACITE_RESOLUTION_SOURCE_TOKEN"]
end

{ "message-action" => "create",
"subj-id" => data[:report_url],
"subj"=> {
"id"=> data[:report_url],
"issued"=> data[:created]
"subj" => {
"id" => data[:report_url],
"issued" => data[:created],
},
"total"=> data[:count],
"total" => data[:count],
"obj-id" => data[:id],
"relation-type-id" => type,
"source-id" => source_id,
"source-token" => source_token,
"occurred-at" => data[:created_at],
"license" => LICENSE
}
"license" => LICENSE }
end

def self.push_datasets items, options={}
def self.push_datasets(items, options = {})
if items.empty?
Rails.logger.info "No works found in the Queue."
Rails.logger.warn "No works found in the Queue."
else
Array.wrap(items).map do |item|
UsageUpdateExportJob.perform_later(item.to_json, options)
end
end
end

def self.push_item(item, options={})
def self.push_item(item, options = {})
item = JSON.parse(item)

if item["subj-id"].blank?
Rails.logger.info OpenStruct.new(body: { "errors" => [{ "title" => "There is no Subject" }] })
Rails.logger.error OpenStruct.new(body: { "errors" => [{ "title" => "There is no Subject" }] })
return
elsif ENV['LAGOTTINO_TOKEN'].blank?
Rails.logger.info OpenStruct.new(body: { "errors" => [{ "title" => "Access token missing." }] })
elsif ENV["LAGOTTINO_TOKEN"].blank?
Rails.logger.error OpenStruct.new(body: { "errors" => [{ "title" => "Access token missing." }] })
return
elsif item["errors"].present?
Rails.logger.info OpenStruct.new(body: { "errors" => [{ "title" => "#{item["errors"]["title"]}" }] })
Rails.logger.error OpenStruct.new(body: { "errors" => [{ "title" => (item["errors"]["title"]).to_s }] })
return
end

data = wrap_event item, options
push_url = ENV['LAGOTTINO_URL'] + "/events"
push_url = ENV["LAGOTTINO_URL"] + "/events"

response = Maremma.post(push_url, data: data.to_json,
bearer: ENV['LAGOTTINO_TOKEN'],
content_type: 'application/vnd.api+json',
accept: 'application/vnd.api+json; version=2')
bearer: ENV["LAGOTTINO_TOKEN"],
content_type: "application/vnd.api+json",
accept: "application/vnd.api+json; version=2")
end

def self.wrap_event(item, options={})
def self.wrap_event(item, options = {})
obj = cached_datacite_response(item["obj-id"])
subj = options[:report_meta]
{
{
"data" => {
"type" => "events",
"attributes" => {
Expand All @@ -133,6 +133,9 @@ def self.wrap_event(item, options={})
"timestamp" => item["timestamp"],
"license" => item["license"],
"subj" => subj,
"obj" => obj } }}
"obj" => obj,
},
},
}
end
end

0 comments on commit 06b0f8d

Please sign in to comment.