diff --git a/.rubocop.yml b/.rubocop.yml
index 946318c8..23560c32 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -655,4 +655,143 @@ Rails/TimeZone:
 
 Rails/Validation:
   Description: 'Use validates :attribute, hash of validations.'
-  Enabled: false
\ No newline at end of file
+  Enabled: false
+
+Gemspec/DateAssignment: # (new in 1.10)
+  Enabled: true
+Layout/SpaceBeforeBrackets: # (new in 1.7)
+  Enabled: true
+Lint/AmbiguousAssignment: # (new in 1.7)
+  Enabled: true
+Lint/DeprecatedConstants: # (new in 1.8)
+  Enabled: true
+Lint/DuplicateBranch: # (new in 1.3)
+  Enabled: true
+Lint/DuplicateRegexpCharacterClassElement: # (new in 1.1)
+  Enabled: true
+Lint/EmptyBlock: # (new in 1.1)
+  Enabled: true
+Lint/EmptyClass: # (new in 1.3)
+  Enabled: true
+Lint/EmptyInPattern: # (new in 1.16)
+  Enabled: true
+Lint/LambdaWithoutLiteralBlock: # (new in 1.8)
+  Enabled: true
+Lint/NoReturnInBeginEndBlocks: # (new in 1.2)
+  Enabled: true
+Lint/NumberedParameterAssignment: # (new in 1.9)
+  Enabled: true
+Lint/OrAssignmentToConstant: # (new in 1.9)
+  Enabled: true
+Lint/RedundantDirGlobSort: # (new in 1.8)
+  Enabled: true
+Lint/SymbolConversion: # (new in 1.9)
+  Enabled: true
+Lint/ToEnumArguments: # (new in 1.1)
+  Enabled: true
+Lint/TripleQuotes: # (new in 1.9)
+  Enabled: true
+Lint/UnexpectedBlockArity: # (new in 1.5)
+  Enabled: true
+Lint/UnmodifiedReduceAccumulator: # (new in 1.1)
+  Enabled: true
+Style/ArgumentsForwarding: # (new in 1.1)
+  Enabled: true
+Style/CollectionCompact: # (new in 1.2)
+  Enabled: true
+Style/DocumentDynamicEvalDefinition: # (new in 1.1)
+  Enabled: true
+Style/EndlessMethod: # (new in 1.8)
+  Enabled: true
+Style/HashConversion: # (new in 1.10)
+  Enabled: true
+Style/HashExcept: # (new in 1.7)
+  Enabled: true
+Style/IfWithBooleanLiteralBranches: # (new in 1.9)
+  Enabled: true
+Style/InPatternThen: # (new in 1.16)
+  Enabled: true
+Style/MultilineInPatternThen: # (new in 1.16)
+  Enabled: true
+Style/NegatedIfElseCondition: # (new in 1.2)
+  Enabled: true
+Style/NilLambda: # (new in 1.3)
+  Enabled: true
+Style/QuotedSymbols: # (new in 1.16)
+  Enabled: true
+Style/RedundantArgument: # (new in 1.4)
+  Enabled: true
+Style/StringChars: # (new in 1.12)
+  Enabled: true
+Style/SwapValues: # (new in 1.1)
+  Enabled: true
+Rails/ActiveRecordCallbacksOrder: # (new in 2.7)
+  Enabled: true
+Rails/AfterCommitOverride: # (new in 2.8)
+  Enabled: true
+Rails/AttributeDefaultBlockValue: # (new in 2.9)
+  Enabled: true
+Rails/EnvironmentVariableAccess: # (new in 2.10)
+  Enabled: true
+Rails/FindById: # (new in 2.7)
+  Enabled: true
+Rails/Inquiry: # (new in 2.7)
+  Enabled: true
+Rails/MailerName: # (new in 2.7)
+  Enabled: true
+Rails/MatchRoute: # (new in 2.7)
+  Enabled: true
+Rails/NegateInclude: # (new in 2.7)
+  Enabled: true
+Rails/Pluck: # (new in 2.7)
+  Enabled: true
+Rails/PluckInWhere: # (new in 2.7)
+  Enabled: true
+Rails/RenderInline: # (new in 2.7)
+  Enabled: true
+Rails/RenderPlainText: # (new in 2.7)
+  Enabled: true
+Rails/ShortI18n: # (new in 2.7)
+  Enabled: true
+Rails/SquishedSQLHeredocs: # (new in 2.8)
+  Enabled: true
+Rails/TimeZoneAssignment: # (new in 2.10)
+  Enabled: true
+Rails/WhereEquals: # (new in 2.9)
+  Enabled: true
+Rails/WhereExists: # (new in 2.7)
+  Enabled: true
+Rails/WhereNot: # (new in 2.8)
+  Enabled: true
+Performance/AncestorsInclude: # (new in 1.7)
+  Enabled: true
+Performance/BigDecimalWithNumericArgument: # (new in 1.7)
+  Enabled: true
+Performance/BlockGivenWithExplicitBlock: # (new in 1.9)
+  Enabled: true
+Performance/CollectionLiteralInLoop: # (new in 1.8)
+  Enabled: true
+Performance/ConstantRegexp: # (new in 1.9)
+  Enabled: true
+Performance/MapCompact: # (new in 1.11)
+  Enabled: true
+Performance/MethodObjectAsBlock: # (new in 1.9)
+  Enabled: true
+Performance/RedundantEqualityComparisonBlock: # (new in 1.10)
+  Enabled: true
+Performance/RedundantSortBlock: # (new in 1.7)
+  Enabled: true
+Performance/RedundantSplitRegexpArgument: # (new in 1.10)
+  Enabled: true
+Performance/RedundantStringChars: # (new in 1.7)
+  Enabled: true
+Performance/ReverseFirst: # (new in 1.7)
+  Enabled: true
+Performance/SortReverse: # (new in 1.7)
+  Enabled: true
+Performance/Squeeze: # (new in 1.7)
+  Enabled: true
+Performance/StringInclude: # (new in 1.7)
+  Enabled: true
+Performance/Sum: # (new in 1.8)
+  Enabled: true
\ No newline at end of file
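
Note: the cops above are opted in one by one rather than via `AllCops: NewCops: enable`, which keeps future RuboCop upgrades deliberate. As a hedged illustration of what two of the newly enabled cops enforce (the snippets are hypothetical, not taken from this codebase):

    response = nil
    status = if response.nil?
               "error"
             else
               "error" # Lint/DuplicateBranch: both branches are identical
             end

    dois = ["10.5061/x", nil, "10.5061/y"]
    dois.reject(&:nil?) # before
    dois.compact        # after; Style/CollectionCompact autocorrects to this
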
diff --git a/app/controllers/agents_controller.rb b/app/controllers/agents_controller.rb
index f686988b..b5ff6de0 100644
--- a/app/controllers/agents_controller.rb
+++ b/app/controllers/agents_controller.rb
@@ -5,34 +5,39 @@ def crossref
     authorize! :import, Crossref
     total = Crossref.import
 
-    render json: { message: "[Crossref Agent] Queued import for #{total} DOIs." }.to_json, status: :ok
+    render json: { message: "[Crossref Agent] Queued import for #{total} DOIs." }.to_json,
+           status: :ok
   end
 
   def crossref_orcid
     authorize! :import, CrossrefOrcid
     total = CrossrefOrcid.import
 
-    render json: { message: "[Crossref-ORCID Agent] Queued import for #{total} DOIs." }.to_json, status: :ok
+    render json: { message: "[Crossref-ORCID Agent] Queued import for #{total} DOIs." }.to_json,
+           status: :ok
   end
 
   def crossref_funder
     authorize! :import, CrossrefFunder
     total = CrossrefFunder.import
 
-    render json: { message: "[Crossref-Funder Agent] Queued import for #{total} DOIs." }.to_json, status: :ok
+    render json: { message: "[Crossref-Funder Agent] Queued import for #{total} DOIs." }.to_json,
+           status: :ok
   end
 
   def crossref_related
     authorize! :import, CrossrefRelated
     total = CrossrefRelated.import
 
-    render json: { message: "[Crossref-Related Agent] Queued import for #{total} DOIs." }.to_json, status: :ok
+    render json: { message: "[Crossref-Related Agent] Queued import for #{total} DOIs." }.to_json,
+           status: :ok
   end
 
   def crossref_import
     authorize! :import, CrossrefImport
     total = CrossrefImport.import
 
-    render json: { message: "[Crossref-Import Agent] Queued import for #{total} DOIs." }.to_json, status: :ok
+    render json: { message: "[Crossref-Import Agent] Queued import for #{total} DOIs." }.to_json,
+           status: :ok
   end
 end
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index 5dc408a5..05d86ce8 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -2,7 +2,7 @@ class ApplicationController < ActionController::API
   include Authenticable
   include CanCan::ControllerAdditions
   include ErrorSerializable
-  require 'facets/string/snakecase'
+  require "facets/string/snakecase"
 
   # include helper module for caching infrequently changing resources
   include Cacheable
@@ -20,15 +20,15 @@ class ApplicationController < ActionController::API
   def set_jsonp_format
     if params[:callback] && request.get?
       self.response_body = "#{params[:callback]}(#{response.body})"
-      headers["Content-Type"] = 'application/javascript'
+      headers["Content-Type"] = "application/javascript"
     end
   end
 
   def set_consumer_header
     if current_user
-      response.headers['X-Credential-Username'] = current_user.uid
+      response.headers["X-Credential-Username"] = current_user.uid
     else
-      response.headers['X-Anonymous-Consumer'] = true
+      response.headers["X-Anonymous-Consumer"] = true
     end
   end
 
@@ -36,15 +36,15 @@ def default_format_json
     request.format = :json if request.format.html?
   end
 
-  #convert parameters with hyphen to parameters with underscore.
+  # convert parameters with hyphen to parameters with underscore.
   # https://stackoverflow.com/questions/35812277/fields-parameters-with-hyphen-in-ruby-on-rails
   def transform_params
-    params.transform_keys! { |key| key.tr('-', '_') }
+    params.transform_keys! { |key| key.tr("-", "_") }
   end
 
   def authenticate_user_from_token!
     token = token_from_request_headers
-    return false unless token.present?
+    return false if token.blank?
 
     @current_user = User.new(token)
   end
@@ -55,35 +55,35 @@ def current_ability
 
   # from https://github.com/nsarno/knock/blob/master/lib/knock/authenticable.rb
   def token_from_request_headers
-    unless request.headers['Authorization'].nil?
-      request.headers['Authorization'].split.last
-    end
+    request.headers["Authorization"]&.split&.last
   end
 
   unless Rails.env.development?
     rescue_from *RESCUABLE_EXCEPTIONS do |exception|
       status = case exception.class.to_s
                when "CanCan::AccessDenied", "JWT::DecodeError" then 401
-               when "Elasticsearch::Transport::Transport::Errors::NotFound","AbstractController::ActionNotFound",  "ActionController::RoutingError" then 404
+               when "Elasticsearch::Transport::Transport::Errors::NotFound", "AbstractController::ActionNotFound", "ActionController::RoutingError" then 404
                when "ActiveModel::ForbiddenAttributesError", "ActionController::ParameterMissing", "ActionController::UnpermittedParameters", "NoMethodError" then 422
                else 400
                end
 
-      if status == 404
-        message = "The resource you are looking for doesn't exist."
-      elsif status == 401
-        message = "You are not authorized to access this page."
-      else
-        message = exception.message
-      end
-
-      render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status
+      message = case status
+                when 404
+                  "The resource you are looking for doesn't exist."
+                when 401
+                  "You are not authorized to access this page."
+                else
+                  exception.message
+                end
+
+      render json: { errors: [{ status: status.to_s, title: message }] }.to_json,
+             status: status
     end
   end
 
   protected
 
   def is_admin_or_staff?
-    current_user && current_user.is_admin_or_staff? ? 1 : 0
+    current_user&.is_admin_or_staff? ? 1 : 0
   end
 end
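
Note: the safe-navigation rewrite of token_from_request_headers is behavior-preserving. String#split defaults to whitespace, so "Bearer <token>" yields the token as the last element, and a missing Authorization header short-circuits to nil instead of raising. A minimal sketch (header values hypothetical):

    header = "Bearer abc.def.ghi" # hypothetical Authorization value
    header&.split&.last           # => "abc.def.ghi"

    header = nil                  # header absent
    header&.split&.last           # => nil, no NoMethodError raised
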
diff --git a/app/controllers/concerns/delegatable.rb b/app/controllers/concerns/delegatable.rb
index ac8c9310..9822fcdf 100644
--- a/app/controllers/concerns/delegatable.rb
+++ b/app/controllers/concerns/delegatable.rb
@@ -3,11 +3,13 @@ module Delegatable
 
   included do
     def dois_count(uid, **options)
-      Rails.cache.fetch("dois_count/#{uid}", expires_in: 6.hours, force: options[:force]) do
-        if self.is_a?(ClientsController)
-          response = Maremma.get(ENV['API_URL'] + "/clients/" + uid)
-        elsif self.is_a?(ProvidersController)
-          response = Maremma.get(ENV['API_URL'] + "/providers/" + uid)
+      Rails.cache.fetch("dois_count/#{uid}", expires_in: 6.hours,
+                                             force: options[:force]) do
+        case self
+        when ClientsController
+          response = Maremma.get("#{ENV['API_URL']}/clients/#{uid}")
+        when ProvidersController
+          response = Maremma.get("#{ENV['API_URL']}/providers/#{uid}")
         end
         response.body.to_h.dig("meta", "dois")
       end
@@ -23,7 +25,6 @@ def dois_count(uid, **options)
     #   response.body["meta"]["prefixes"]
     # end
 
-
     # def repository_count uid
     #   if self.is_a?(ClientsController)
     #     response = Maremma.get(ENV['API_URL']+"/clients/"+uid)
@@ -33,6 +34,5 @@ def dois_count(uid, **options)
 
     #   response.body["meta"]["repositories"]
     # end
-
   end
 end
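
Note: the case self rewrite is equivalent to the old is_a? chain because when compares with Module#===, which is an is_a? test. A self-contained sketch (stub classes stand in for the real controllers):

    class ClientsController; end
    class ProvidersController; end

    controller = ClientsController.new
    case controller
    when ClientsController   then "clients"   # ClientsController === controller
    when ProvidersController then "providers"
    else "other"
    end
    # => "clients"
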
diff --git a/app/controllers/concerns/facetable.rb b/app/controllers/concerns/facetable.rb
index 1632cd91..88c6f645 100644
--- a/app/controllers/concerns/facetable.rb
+++ b/app/controllers/concerns/facetable.rb
@@ -13,7 +13,7 @@ def facet_by_year(arr)
     def facet_by_provider(arr)
       # generate hash with id and name for each provider in facet
       ids = arr.map { |hsh| hsh["key"] }.join(",")
-      providers = Provider.find_by_ids(ids).to_a.reduce({}) do |sum, p|
+      providers = Provider.find_by_ids(ids).to_a.reduce({}) do |sum, p| # rubocop:disable Rails/DynamicFindBy
         sum[p.id] = p.name
         sum
       end
diff --git a/app/controllers/heartbeat_controller.rb b/app/controllers/heartbeat_controller.rb
index 03f31ff9..6681c323 100644
--- a/app/controllers/heartbeat_controller.rb
+++ b/app/controllers/heartbeat_controller.rb
@@ -1,6 +1,7 @@
 class HeartbeatController < ApplicationController
   def index
     heartbeat = Heartbeat.new
-    render plain: heartbeat.string, status: heartbeat.status, content_type: "text/plain"
+    render plain: heartbeat.string, status: heartbeat.status,
+           content_type: "text/plain"
   end
 end
diff --git a/app/controllers/index_controller.rb b/app/controllers/index_controller.rb
index 09899e9f..96a6ff0d 100644
--- a/app/controllers/index_controller.rb
+++ b/app/controllers/index_controller.rb
@@ -1,5 +1,5 @@
 class IndexController < ApplicationController
   def index
-    render plain: ENV['SITE_TITLE']
+    render plain: ENV["SITE_TITLE"]
   end
 end
diff --git a/app/jobs/affiliation_identifier_import_by_month_job.rb b/app/jobs/affiliation_identifier_import_by_month_job.rb
index b2368a7a..75318958 100644
--- a/app/jobs/affiliation_identifier_import_by_month_job.rb
+++ b/app/jobs/affiliation_identifier_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class AffiliationIdentifierImportByMonthJob < ActiveJob::Base
+class AffiliationIdentifierImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     AffiliationIdentifier.import(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/affiliation_identifier_import_job.rb b/app/jobs/affiliation_identifier_import_job.rb
index a6d175fa..81b7545c 100644
--- a/app/jobs/affiliation_identifier_import_job.rb
+++ b/app/jobs/affiliation_identifier_import_job.rb
@@ -1,7 +1,7 @@
-class AffiliationIdentifierImportJob < ActiveJob::Base
+class AffiliationIdentifierImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
     AffiliationIdentifier.push_item(item)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/application_job.rb b/app/jobs/application_job.rb
new file mode 100644
index 00000000..82047f33
--- /dev/null
+++ b/app/jobs/application_job.rb
@@ -0,0 +1,3 @@
+# frozen_string_literal: true
+
+class ApplicationJob < ActiveJob::Base; end
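
Note: rebasing every job onto ApplicationJob (the Rails 5+ convention) leaves a single place for cross-cutting job policy later. A hedged sketch of what that could look like; none of these settings are part of this change, and the retry/discard choices are illustrative only:

    class ApplicationJob < ActiveJob::Base
      # Hypothetical shared policy, not in this PR:
      retry_on Timeout::Error, wait: :exponentially_longer, attempts: 3
      discard_on ActiveJob::DeserializationError
    end
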
diff --git a/app/jobs/crossref_funder_import_by_month_job.rb b/app/jobs/crossref_funder_import_by_month_job.rb
index 0d4c1712..08fff363 100644
--- a/app/jobs/crossref_funder_import_by_month_job.rb
+++ b/app/jobs/crossref_funder_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class CrossrefFunderImportByMonthJob < ActiveJob::Base
+class CrossrefFunderImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     CrossrefFunder.import(options)
   end
 end
diff --git a/app/jobs/crossref_funder_import_job.rb b/app/jobs/crossref_funder_import_job.rb
index 96d64acb..8ea93227 100644
--- a/app/jobs/crossref_funder_import_job.rb
+++ b/app/jobs/crossref_funder_import_job.rb
@@ -1,4 +1,4 @@
-class CrossrefFunderImportJob < ActiveJob::Base
+class CrossrefFunderImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/crossref_import_by_month_job.rb b/app/jobs/crossref_import_by_month_job.rb
index d173f363..7e193430 100644
--- a/app/jobs/crossref_import_by_month_job.rb
+++ b/app/jobs/crossref_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class CrossrefImportByMonthJob < ActiveJob::Base
+class CrossrefImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     Crossref.import(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/crossref_import_import_by_month_job.rb b/app/jobs/crossref_import_import_by_month_job.rb
index 35be063d..a3bae641 100644
--- a/app/jobs/crossref_import_import_by_month_job.rb
+++ b/app/jobs/crossref_import_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class CrossrefImportImportByMonthJob < ActiveJob::Base
+class CrossrefImportImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     CrossrefImport.import(options)
   end
 end
diff --git a/app/jobs/crossref_import_import_job.rb b/app/jobs/crossref_import_import_job.rb
index 066863d2..36cbb7f1 100644
--- a/app/jobs/crossref_import_import_job.rb
+++ b/app/jobs/crossref_import_import_job.rb
@@ -1,4 +1,4 @@
-class CrossrefImportImportJob < ActiveJob::Base
+class CrossrefImportImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/crossref_import_job.rb b/app/jobs/crossref_import_job.rb
index e72593c4..f94caa7e 100644
--- a/app/jobs/crossref_import_job.rb
+++ b/app/jobs/crossref_import_job.rb
@@ -1,4 +1,4 @@
-class CrossrefImportJob < ActiveJob::Base
+class CrossrefImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/crossref_orcid_import_by_month_job.rb b/app/jobs/crossref_orcid_import_by_month_job.rb
index 743bb0e6..fc114b1b 100644
--- a/app/jobs/crossref_orcid_import_by_month_job.rb
+++ b/app/jobs/crossref_orcid_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class CrossrefOrcidImportByMonthJob < ActiveJob::Base
+class CrossrefOrcidImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     CrossrefOrcid.import(options)
   end
 end
diff --git a/app/jobs/crossref_orcid_import_job.rb b/app/jobs/crossref_orcid_import_job.rb
index 2812f293..d1a122f2 100644
--- a/app/jobs/crossref_orcid_import_job.rb
+++ b/app/jobs/crossref_orcid_import_job.rb
@@ -1,4 +1,4 @@
-class CrossrefOrcidImportJob < ActiveJob::Base
+class CrossrefOrcidImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/crossref_related_import_by_month_job.rb b/app/jobs/crossref_related_import_by_month_job.rb
index 3b0d7f0e..0e0ef894 100644
--- a/app/jobs/crossref_related_import_by_month_job.rb
+++ b/app/jobs/crossref_related_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class CrossrefRelatedImportByMonthJob < ActiveJob::Base
+class CrossrefRelatedImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     CrossrefRelated.import(options)
   end
 end
diff --git a/app/jobs/crossref_related_import_job.rb b/app/jobs/crossref_related_import_job.rb
index 73730370..664590c6 100644
--- a/app/jobs/crossref_related_import_job.rb
+++ b/app/jobs/crossref_related_import_job.rb
@@ -1,4 +1,4 @@
-class CrossrefRelatedImportJob < ActiveJob::Base
+class CrossrefRelatedImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/funder_identifier_import_by_month_job.rb b/app/jobs/funder_identifier_import_by_month_job.rb
index 5c74ebc8..babaebc8 100644
--- a/app/jobs/funder_identifier_import_by_month_job.rb
+++ b/app/jobs/funder_identifier_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class FunderIdentifierImportByMonthJob < ActiveJob::Base
+class FunderIdentifierImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     FunderIdentifier.import(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/funder_identifier_import_job.rb b/app/jobs/funder_identifier_import_job.rb
index 3fe7b0e1..f13827c0 100644
--- a/app/jobs/funder_identifier_import_job.rb
+++ b/app/jobs/funder_identifier_import_job.rb
@@ -1,7 +1,7 @@
-class FunderIdentifierImportJob < ActiveJob::Base
+class FunderIdentifierImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
     FunderIdentifier.push_item(item)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/import_job.rb b/app/jobs/import_job.rb
index 7071366c..ac18905d 100644
--- a/app/jobs/import_job.rb
+++ b/app/jobs/import_job.rb
@@ -1,8 +1,8 @@
-class ImportJob < ActiveJob::Base
+class ImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(data)
-    klass = Kernel.const_get(data.fetch("type").chomp('s').capitalize)
+    klass = Kernel.const_get(data.fetch("type").chomp("s").capitalize)
     klass.import_record(data)
   end
 end
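
Note: ImportJob derives the model class from the message's plural "type" value by trimming the trailing "s" and capitalizing. A sketch of the lookup (the type string is hypothetical):

    "providers".chomp("s").capitalize # => "Provider"
    Kernel.const_get("Provider")      # => Provider, the class constant
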
diff --git a/app/jobs/name_identifier_import_by_month_job.rb b/app/jobs/name_identifier_import_by_month_job.rb
index 18403abe..ff0319df 100644
--- a/app/jobs/name_identifier_import_by_month_job.rb
+++ b/app/jobs/name_identifier_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class NameIdentifierImportByMonthJob < ActiveJob::Base
+class NameIdentifierImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     NameIdentifier.import(options)
   end
 end
diff --git a/app/jobs/name_identifier_import_job.rb b/app/jobs/name_identifier_import_job.rb
index 6c60609f..a7ec52d2 100644
--- a/app/jobs/name_identifier_import_job.rb
+++ b/app/jobs/name_identifier_import_job.rb
@@ -1,4 +1,4 @@
-class NameIdentifierImportJob < ActiveJob::Base
+class NameIdentifierImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/orcid_affiliation_import_by_month_job.rb b/app/jobs/orcid_affiliation_import_by_month_job.rb
index fa420f4a..f48c644c 100644
--- a/app/jobs/orcid_affiliation_import_by_month_job.rb
+++ b/app/jobs/orcid_affiliation_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class OrcidAffiliationImportByMonthJob < ActiveJob::Base
+class OrcidAffiliationImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     OrcidAffiliation.import(options)
   end
 end
diff --git a/app/jobs/orcid_affiliation_import_job.rb b/app/jobs/orcid_affiliation_import_job.rb
index 7eb5c464..0d4df4bb 100644
--- a/app/jobs/orcid_affiliation_import_job.rb
+++ b/app/jobs/orcid_affiliation_import_job.rb
@@ -1,4 +1,4 @@
-class OrcidAffiliationImportJob < ActiveJob::Base
+class OrcidAffiliationImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/orcid_claim_import_by_month_job.rb b/app/jobs/orcid_claim_import_by_month_job.rb
index dad37bb1..7ed1e2d6 100644
--- a/app/jobs/orcid_claim_import_by_month_job.rb
+++ b/app/jobs/orcid_claim_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class OrcidClaimImportByMonthJob < ActiveJob::Base
+class OrcidClaimImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     OrcidClaim.import(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/orcid_claim_import_job.rb b/app/jobs/orcid_claim_import_job.rb
index c22a3e10..2c0ce703 100644
--- a/app/jobs/orcid_claim_import_job.rb
+++ b/app/jobs/orcid_claim_import_job.rb
@@ -1,7 +1,7 @@
-class OrcidClaimImportJob < ActiveJob::Base
+class OrcidClaimImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
     OrcidClaim.push_item(item)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/related_arxiv_import_by_month_job.rb b/app/jobs/related_arxiv_import_by_month_job.rb
index fbb5f441..35a7824e 100644
--- a/app/jobs/related_arxiv_import_by_month_job.rb
+++ b/app/jobs/related_arxiv_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedArxivImportByMonthJob < ActiveJob::Base
+class RelatedArxivImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedArxiv.import(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/related_arxiv_import_job.rb b/app/jobs/related_arxiv_import_job.rb
index c16c2694..b61b9645 100644
--- a/app/jobs/related_arxiv_import_job.rb
+++ b/app/jobs/related_arxiv_import_job.rb
@@ -1,7 +1,7 @@
-class RelatedArxivImportJob < ActiveJob::Base
+class RelatedArxivImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
     RelatedArxiv.push_item(item)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/related_handle_import_by_month_job.rb b/app/jobs/related_handle_import_by_month_job.rb
index 707dcf5f..6d8d7801 100644
--- a/app/jobs/related_handle_import_by_month_job.rb
+++ b/app/jobs/related_handle_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedHandleImportByMonthJob < ActiveJob::Base
+class RelatedHandleImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedHandle.import(options)
   end
 end
diff --git a/app/jobs/related_handle_import_job.rb b/app/jobs/related_handle_import_job.rb
index 51eedc13..23011e4a 100644
--- a/app/jobs/related_handle_import_job.rb
+++ b/app/jobs/related_handle_import_job.rb
@@ -1,4 +1,4 @@
-class RelatedHandleImportJob < ActiveJob::Base
+class RelatedHandleImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/related_identifier_import_by_month_job.rb b/app/jobs/related_identifier_import_by_month_job.rb
index 5e763546..707d054c 100644
--- a/app/jobs/related_identifier_import_by_month_job.rb
+++ b/app/jobs/related_identifier_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedIdentifierImportByMonthJob < ActiveJob::Base
+class RelatedIdentifierImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedIdentifier.import(options)
   end
 end
diff --git a/app/jobs/related_identifier_import_job.rb b/app/jobs/related_identifier_import_job.rb
index 955d3aef..473f8f59 100644
--- a/app/jobs/related_identifier_import_job.rb
+++ b/app/jobs/related_identifier_import_job.rb
@@ -1,4 +1,4 @@
-class RelatedIdentifierImportJob < ActiveJob::Base
+class RelatedIdentifierImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/related_igsn_import_by_month_job.rb b/app/jobs/related_igsn_import_by_month_job.rb
index aefc94f4..4d53bd0a 100644
--- a/app/jobs/related_igsn_import_by_month_job.rb
+++ b/app/jobs/related_igsn_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedIgsnImportByMonthJob < ActiveJob::Base
+class RelatedIgsnImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedIgsn.import(options)
   end
 end
diff --git a/app/jobs/related_igsn_import_job.rb b/app/jobs/related_igsn_import_job.rb
index 308f6d5b..8fe7cf1c 100644
--- a/app/jobs/related_igsn_import_job.rb
+++ b/app/jobs/related_igsn_import_job.rb
@@ -1,4 +1,4 @@
-class RelatedIgsnImportJob < ActiveJob::Base
+class RelatedIgsnImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/related_pmid_import_by_month_job.rb b/app/jobs/related_pmid_import_by_month_job.rb
index b75082fd..51151d31 100644
--- a/app/jobs/related_pmid_import_by_month_job.rb
+++ b/app/jobs/related_pmid_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedPmidImportByMonthJob < ActiveJob::Base
+class RelatedPmidImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedPmid.import(options)
   end
 end
diff --git a/app/jobs/related_pmid_import_job.rb b/app/jobs/related_pmid_import_job.rb
index 5ba13414..86657067 100644
--- a/app/jobs/related_pmid_import_job.rb
+++ b/app/jobs/related_pmid_import_job.rb
@@ -1,4 +1,4 @@
-class RelatedPmidImportJob < ActiveJob::Base
+class RelatedPmidImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/related_url_import_by_month_job.rb b/app/jobs/related_url_import_by_month_job.rb
index d36d713f..b1a06bc0 100644
--- a/app/jobs/related_url_import_by_month_job.rb
+++ b/app/jobs/related_url_import_by_month_job.rb
@@ -1,7 +1,7 @@
-class RelatedUrlImportByMonthJob < ActiveJob::Base
+class RelatedUrlImportByMonthJob < ApplicationJob
   queue_as :levriero
 
-  def perform(options={})
+  def perform(options = {})
     RelatedUrl.import(options)
   end
 end
diff --git a/app/jobs/related_url_import_job.rb b/app/jobs/related_url_import_job.rb
index 3adfad69..b85016a6 100644
--- a/app/jobs/related_url_import_job.rb
+++ b/app/jobs/related_url_import_job.rb
@@ -1,4 +1,4 @@
-class RelatedUrlImportJob < ActiveJob::Base
+class RelatedUrlImportJob < ApplicationJob
   queue_as :levriero
 
   def perform(item)
diff --git a/app/jobs/report_import_job.rb b/app/jobs/report_import_job.rb
index 864d19f7..441c4310 100644
--- a/app/jobs/report_import_job.rb
+++ b/app/jobs/report_import_job.rb
@@ -1,9 +1,9 @@
-class ReportImportJob < ActiveJob::Base
+class ReportImportJob < ApplicationJob
   queue_as :levriero
 
-  ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png"
+  ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png".freeze
 
-  def perform(item, options={})
+  def perform(item, options = {})
     response = UsageUpdate.get_data(item, options)
     if response.status != 200
       Rails.logger.error "[Usage Report Parsing] Report #{item} not found"
diff --git a/app/jobs/usage_update_export_job.rb b/app/jobs/usage_update_export_job.rb
index 199fc03a..10d694a7 100644
--- a/app/jobs/usage_update_export_job.rb
+++ b/app/jobs/usage_update_export_job.rb
@@ -1,7 +1,7 @@
-class UsageUpdateExportJob < ActiveJob::Base
+class UsageUpdateExportJob < ApplicationJob
   queue_as :levriero_usage
 
-  def perform(item, options={})
+  def perform(item, options = {})
     response = UsageUpdate.push_item(item, options)
     item = JSON.parse(item)
     if response.status == 201
diff --git a/app/jobs/usage_update_import_by_year_job.rb b/app/jobs/usage_update_import_by_year_job.rb
index c497f1db..934a3b9a 100644
--- a/app/jobs/usage_update_import_by_year_job.rb
+++ b/app/jobs/usage_update_import_by_year_job.rb
@@ -1,7 +1,7 @@
-class UsageUpdateImportByYearJob < ActiveJob::Base
+class UsageUpdateImportByYearJob < ApplicationJob
   queue_as :levriero_usage
 
-  def perform(options={})
+  def perform(options = {})
     UsageUpdate.import_reports(options)
   end
-end
\ No newline at end of file
+end
diff --git a/app/jobs/usage_update_parse_job.rb b/app/jobs/usage_update_parse_job.rb
index 735c6fe5..9196a478 100644
--- a/app/jobs/usage_update_parse_job.rb
+++ b/app/jobs/usage_update_parse_job.rb
@@ -1,4 +1,4 @@
-class UsageUpdateParseJob < ActiveJob::Base
+class UsageUpdateParseJob < ApplicationJob
   queue_as :levriero_usage
 
   ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png".freeze
@@ -8,12 +8,13 @@ def perform(dataset, options)
     # report = Report.new(report_header, options)
     data = Report.translate_datasets dataset, options
     # data = Report.new(response, options).parse_data
-    send_message(data, options[:url], slack_webhook_url: ENV["SLACK_WEBHOOK_URL"])
+    send_message(data, options[:url],
+                 slack_webhook_url: ENV["SLACK_WEBHOOK_URL"])
     options.merge(
       report_meta: {
-        report_id: options[:header].dig("report-id"),
-        created_by: options[:header].dig("created-by"),
-        reporting_period: options[:header].dig("reporting-period"),
+        report_id: options[:header]["report-id"],
+        created_by: options[:header]["created-by"],
+        reporting_period: options[:header]["reporting-period"],
       },
     )
 
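
Note: the dig-to-[] change is what Style/SingleArgumentDig enforces: Hash#dig with a single key is just Hash#[]; dig only earns its keep on nested access. A small example (hash contents hypothetical):

    header = { "report-id" => "r1", "meta" => { "total" => 2 } }
    header["report-id"]          # => "r1"; one key needs no dig
    header.dig("meta", "total")  # => 2; dig is for nested keys
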
diff --git a/app/models/ability.rb b/app/models/ability.rb
index 2079406e..2c059ae7 100644
--- a/app/models/ability.rb
+++ b/app/models/ability.rb
@@ -7,9 +7,10 @@ def initialize(user)
     user ||= User.new(nil) # Guest user
     @user = user
 
-    if user.role_id == "staff_admin"
+    case user.role_id
+    when "staff_admin"
       can :manage, :all
-    elsif user.role_id == "staff_user"
+    when "staff_user"
       can :read, :all
     end
   end
diff --git a/app/models/affiliation_identifier.rb b/app/models/affiliation_identifier.rb
index ebd33722..e89004d7 100644
--- a/app/models/affiliation_identifier.rb
+++ b/app/models/affiliation_identifier.rb
@@ -1,13 +1,15 @@
 class AffiliationIdentifier < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      AffiliationIdentifierImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      AffiliationIdentifierImportByMonthJob.perform_later(
+        from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"),
+      )
     end
 
     "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -18,7 +20,9 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     name_identifier = AffiliationIdentifier.new
-    name_identifier.queue_jobs(name_identifier.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    name_identifier.queue_jobs(name_identifier.unfreeze(
+                                 from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                               ))
   end
 
   def source_id
@@ -29,17 +33,17 @@ def query
     "creators.affiliation.affiliationIdentifierScheme:ROR"
   end
 
-  def push_data(result, options = {})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
 
     Array.wrap(items).map do |item|
-      begin
-        AffiliationIdentifierImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      AffiliationIdentifierImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     items.length
@@ -48,15 +52,18 @@ def push_data(result, options = {})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers", nil))
+    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                      nil))
     skip_doi = related_identifiers.any? do |related_identifier|
-      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf", "IsVersionOf"].include?(related_identifier["relatedIdentifierType"])
+      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf",
+       "IsVersionOf"].include?(related_identifier["relatedIdentifierType"])
     end
 
-    affiliation_identifiers = attributes.fetch("creators", []).reduce([]) do |sum, c| 
+    affiliation_identifiers = attributes.fetch("creators",
+                                               []).reduce([]) do |sum, c|
       Array.wrap(c["affiliation"]).each do |a|
         sum << a["affiliationIdentifier"] if a["affiliationIdentifierScheme"] == "ROR"
       end
@@ -65,10 +72,11 @@ def self.push_item(item)
     end
 
     return nil if affiliation_identifiers.blank? || skip_doi
+
     source_id = item.fetch("sourceId", "datacite_affiliation")
     relation_type_id = "is_authored_at"
-    source_token = ENV['DATACITE_AFFILIATION_SOURCE_TOKEN']
-    
+    source_token = ENV["DATACITE_AFFILIATION_SOURCE_TOKEN"]
+
     push_items = Array.wrap(affiliation_identifiers).reduce([]) do |ssum, iitem|
       obj_id = normalize_ror(iitem)
 
@@ -88,17 +96,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # there can be one or more affiliation_identifier per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -112,12 +120,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
@@ -134,23 +145,24 @@ def self.push_item(item)
   end
 
   def self.get_ror_metadata(id)
-    return {} unless id.present?
+    return {} if id.blank?
 
-    url = "https://api.ror.org/organizations/" + id[8..-1]
+    url = "https://api.ror.org/organizations/#{id[8..-1]}"
     response = Maremma.get(url, host: true)
     return {} if response.status != 200
 
     message = response.body.fetch("data", {})
-    
-    location = { 
+
+    location = {
       "type" => "postalAddress",
-      "addressCountry" => message.dig("country", "country_name")
+      "addressCountry" => message.dig("country", "country_name"),
     }
-    
+
     {
       "@id" => id,
       "@type" => "Organization",
       "name" => message["name"],
-      "location" => location }.compact
+      "location" => location,
+    }.compact
   end
 end
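
Note: dropping begin/end inside the map block relies on Ruby 2.6+, where a do...end block body may carry its own rescue clause directly (what Style/RedundantBegin enforces here). A minimal sketch:

    [1, 2, 0].map do |n|
      10 / n
    rescue ZeroDivisionError => e
      warn e.message # rescued per element; the map keeps going
      nil
    end
    # => [10, 5, nil]
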
diff --git a/app/models/base.rb b/app/models/base.rb
index 9f907fd1..508f4464 100644
--- a/app/models/base.rb
+++ b/app/models/base.rb
@@ -6,17 +6,18 @@ class Base
   include ::Bolognese::MetadataUtils
 
   # icon for Slack messages
-  ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png"
+  ICON_URL = "https://raw.githubusercontent.com/datacite/toccatore/master/lib/toccatore/images/toccatore.png".freeze
 
   def queue(_options = {})
-    Rails.logger.error "Queue name has not been specified" unless ENV["ENVIRONMENT"].present?
-    Rails.logger.error "AWS_REGION has not been specified" unless ENV["AWS_REGION"].present?
+    Rails.logger.error "Queue name has not been specified" if ENV["ENVIRONMENT"].blank?
+    Rails.logger.error "AWS_REGION has not been specified" if ENV["AWS_REGION"].blank?
     region = ENV["AWS_REGION"] ||= "eu-west-1"
     Aws::SQS::Client.new(region: region.to_s, stub_responses: false)
   end
 
   def get_message(_options = {})
-    sqs.receive_message(queue_url: queue_url, max_number_of_messages: 1, wait_time_seconds: 1)
+    sqs.receive_message(queue_url: queue_url, max_number_of_messages: 1,
+                        wait_time_seconds: 1)
   end
 
   def delete_message(message)
@@ -58,7 +59,7 @@ def get_query_url(options = {})
     # end
 
     params = {
-      query: query + " AND " + updated,
+      query: "#{query} AND #{updated}",
       "resource-type-id" => options[:resource_type_id],
       "page[number]" => options[:number],
       "page[size]" => options[:size],
@@ -78,13 +79,15 @@ def get_total(options = {})
   def queue_jobs(options = {})
     options[:number] = options[:number].to_i || 1
     options[:size] = options[:size].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
     options[:content_type] = "json"
 
     total = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor, unless test environment
       total_pages = Rails.env.test? ? 1 : (total.to_f / job_batch_size).ceil
       error_total = 0
@@ -102,15 +105,18 @@ def queue_jobs(options = {})
     Rails.logger.info text
 
     # send slack notification
-    options[:level] = if total == 0
+    options[:level] = if total.zero?
                         "warning"
-                      elsif error_total > 0
+                      elsif error_total.positive?
                         "danger"
                       else
                         "good"
                       end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text,
+                                 options)
+    end
 
     # return number of dois queued
     total
@@ -127,7 +133,7 @@ def get_data(options = {})
   end
 
   def url
-    ENV["API_URL"] + "/dois?"
+    "#{ENV['API_URL']}/dois?"
   end
 
   def timeout
@@ -139,7 +145,7 @@ def job_batch_size
   end
 
   def send_notification_to_slack(text, options = {})
-    return nil unless options[:slack_webhook_url].present?
+    return nil if options[:slack_webhook_url].blank?
 
     attachment = {
       title: options[:title] || "Report",
@@ -164,11 +170,12 @@ def self.doi_from_url(url)
   def self.parse_attributes(element, options = {})
     content = options[:content] || "__content__"
 
-    if element.is_a?(String)
+    case element
+    when String
       element
-    elsif element.is_a?(Hash)
+    when Hash
       element.fetch(content, nil)
-    elsif element.is_a?(Array)
+    when Array
       a = element.map { |e| e.is_a?(Hash) ? e.fetch(content, nil) : e }.uniq
       a = options[:first] ? a.first : a.unwrap
     end
@@ -195,7 +202,7 @@ def self.map_hash_keys(element: nil, mapping: nil)
   end
 
   def self.get_date(dates, date_type)
-    dd = Array.wrap(dates).find { |d| d["dateType"] == date_type } || {}
+    dd = Array.wrap(dates).detect { |d| d["dateType"] == date_type } || {}
     dd.fetch("date", nil)
   end
 
@@ -208,7 +215,10 @@ def self.get_date_from_date_parts(date_as_parts)
   end
 
   def self.get_date_from_parts(year, month = nil, day = nil)
-    [year.to_s.rjust(4, "0"), month.to_s.rjust(2, "0"), day.to_s.rjust(2, "0")].reject { |part| part == "00" }.join("-")
+    [year.to_s.rjust(4, "0"), month.to_s.rjust(2, "0"),
+     day.to_s.rjust(2, "0")].reject do |part|
+      part == "00"
+    end.join("-")
   end
 
   def self.get_datacite_xml(id)
@@ -293,7 +303,7 @@ def self.get_crossref_metadata(id)
       "@id" => id,
       "@type" => type,
       "datePublished" => date_published,
-      "registrantId" => "crossref." + meta["member"],
+      "registrantId" => "crossref.#{meta['member']}",
     }.compact
   end
 
@@ -307,7 +317,8 @@ def self.parse_datacite_metadata(id: nil, response: nil)
                     "name" => attributes["publisher"] }
                 end
     proxy_identifiers = Array.wrap(attributes["relatedIdentifiers"]).select do |ri|
-                          ["IsVersionOf", "IsIdenticalTo", "IsPartOf", "IsSupplementTo"].include?(ri["relationType"])
+                          ["IsVersionOf", "IsIdenticalTo", "IsPartOf",
+                           "IsSupplementTo"].include?(ri["relationType"])
                         end.map do |ri|
       ri["relatedIdentifier"]
     end
@@ -342,7 +353,7 @@ def self.get_crossref_member_id(id, _options = {})
 
   def self.get_researcher_metadata(id)
     orcid = orcid_from_url(id)
-    return {} unless orcid.present?
+    return {} if orcid.blank?
 
     url = ENV["API_URL"] + "/users/#{orcid}"
     response = Maremma.get(url)
@@ -362,7 +373,7 @@ def self.get_orcid_metadata(id)
 
     # otherwise store ORCID metadata with DataCite
     orcid = orcid_from_url(id)
-    return {} unless orcid.present?
+    return {} if orcid.blank?
 
     url = ENV["ORCID_API_URL"] + "/#{orcid}/person"
     response = Maremma.get(url, accept: "application/vnd.orcid+json")
diff --git a/app/models/concerns/authenticable.rb b/app/models/concerns/authenticable.rb
index dde014e3..274e9a49 100644
--- a/app/models/concerns/authenticable.rb
+++ b/app/models/concerns/authenticable.rb
@@ -1,36 +1,40 @@
 module Authenticable
   extend ActiveSupport::Concern
 
-  require 'jwt'
+  require "jwt"
   require "base64"
 
   included do
     # encode JWT token using SHA-256 hash algorithm
     def encode_token(payload)
       # replace newline characters with actual newlines
-      private_key = OpenSSL::PKey::RSA.new(ENV['JWT_PRIVATE_KEY'].to_s.gsub('\n', "\n"))
-      JWT.encode(payload, private_key, 'RS256')
-    rescue JSON::GeneratorError => error
-      Rails.logger.error "JSON::GeneratorError: " + error.message + " for " + payload
-      return nil
+      private_key = OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub(
+                                             '\n', "\n"
+                                           ))
+      JWT.encode(payload, private_key, "RS256")
+    rescue JSON::GeneratorError => e
+      Rails.logger.error "JSON::GeneratorError: #{e.message} for #{payload}"
+      nil
     end
 
     # decode JWT token using SHA-256 hash algorithm
     def decode_token(token)
-      public_key = OpenSSL::PKey::RSA.new(ENV['JWT_PUBLIC_KEY'].to_s.gsub('\n', "\n"))
-      payload = (JWT.decode token, public_key, true, { :algorithm => 'RS256' }).first
+      public_key = OpenSSL::PKey::RSA.new(ENV["JWT_PUBLIC_KEY"].to_s.gsub('\n',
+                                                                          "\n"))
+      payload = (JWT.decode token, public_key, true,
+                            { algorithm: "RS256" }).first
 
       # check whether token has expired
       return {} unless Time.now.to_i < payload["exp"].to_i
 
       payload
-    rescue JWT::DecodeError => error
-      Rails.logger.error "JWT::DecodeError: " + error.message + " for " + token
-      return {}
-    rescue OpenSSL::PKey::RSAError => error
-      public_key = ENV['JWT_PUBLIC_KEY'].presence || "nil"
-      Rails.logger.error "OpenSSL::PKey::RSAError: " + error.message + " for " + public_key
-      return {}
+    rescue JWT::DecodeError => e
+      Rails.logger.error "JWT::DecodeError: #{e.message} for #{token}"
+      {}
+    rescue OpenSSL::PKey::RSAError => e
+      public_key = ENV["JWT_PUBLIC_KEY"].presence || "nil"
+      Rails.logger.error "OpenSSL::PKey::RSAError: #{e.message} for #{public_key}"
+      {}
     end
 
     # basic auth
@@ -44,13 +48,14 @@ def encode_auth_param(username: nil, password: nil)
     def decode_auth_param(username: nil, password: nil)
       return {} unless username.present? && password.present?
 
-      if username.include?(".")
-        user = Client.where(symbol: username.upcase).first
-      else
-        user = Provider.unscoped.where(symbol: username.upcase).first
-      end
+      user = if username.include?(".")
+               Client.where(symbol: username.upcase).first
+             else
+               Provider.unscoped.where(symbol: username.upcase).first
+             end
 
-      return {} unless user && secure_compare(user.password, encrypt_password_sha256(password))
+      return {} unless user && secure_compare(user.password,
+                                              encrypt_password_sha256(password))
 
       uid = username.downcase
 
@@ -71,14 +76,10 @@ def get_payload(uid: nil, user: nil)
       }
 
       if uid.include? "."
-        payload.merge!({
-          "provider_id" => uid.split(".", 2).first,
-          "client_id" => uid,
-        })
+        payload["provider_id"] = uid.split(".", 2).first
+        payload["client_id"] = uid
       elsif uid != "admin"
-        payload.merge!({
-          "provider_id" => uid,
-        })
+        payload["provider_id"] = uid
       end
 
       payload
@@ -88,11 +89,12 @@ def get_payload(uid: nil, user: nil)
     # from Devise
     def secure_compare(a, b)
       return false if a.blank? || b.blank? || a.bytesize != b.bytesize
+
       l = a.unpack "C#{a.bytesize}"
 
       res = 0
       b.each_byte { |byte| res |= byte ^ l.shift }
-      res == 0
+      res.zero?
     end
   end
 
@@ -100,8 +102,10 @@ module ClassMethods
     # encode token using SHA-256 hash algorithm
     def encode_token(payload)
       # replace newline characters with actual newlines
-      private_key = OpenSSL::PKey::RSA.new(ENV['JWT_PRIVATE_KEY'].to_s.gsub('\n', "\n"))
-      JWT.encode(payload, private_key, 'RS256')
+      private_key = OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub(
+                                             '\n', "\n"
+                                           ))
+      JWT.encode(payload, private_key, "RS256")
     rescue OpenSSL::PKey::RSAError => e
       Rails.logger.error e.inspect
 
@@ -116,16 +120,16 @@ def encode_auth_param(username: nil, password: nil)
     end
 
     # generate JWT token
-    def generate_token(attributes={})
+    def generate_token(attributes = {})
       payload = {
-        uid:  attributes.fetch(:uid, "0000-0001-5489-3594"),
+        uid: attributes.fetch(:uid, "0000-0001-5489-3594"),
         name: attributes.fetch(:name, "Josiah Carberry"),
         email: attributes.fetch(:email, nil),
         provider_id: attributes.fetch(:provider_id, nil),
         client_id: attributes.fetch(:client_id, nil),
         role_id: attributes.fetch(:role_id, "staff_admin"),
         iat: Time.now.to_i,
-        exp: Time.now.to_i + attributes.fetch(:exp, 30)
+        exp: Time.now.to_i + attributes.fetch(:exp, 30),
       }.compact
 
       encode_token(payload)
@@ -145,14 +149,10 @@ def get_payload(uid: nil, user: nil)
       }
 
       if uid.include? "."
-        payload.merge!({
-          "provider_id" => uid.split(".", 2).first,
-          "client_id" => uid
-        })
+        payload["provider_id"] = uid.split(".", 2).first
+        payload["client_id"] = uid
       elsif uid != "admin"
-        payload.merge!({
-          "provider_id" => uid
-        })
+        payload["provider_id"] = uid
       end
 
       payload
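
Note: for reference, a self-contained RS256 round trip with the jwt gem, mirroring encode_token/decode_token above (key pair generated on the spot; payload values hypothetical):

    require "jwt"
    require "openssl"

    key = OpenSSL::PKey::RSA.new(2048)
    payload = { "uid" => "0000-0001-5489-3594", "exp" => Time.now.to_i + 30 }

    token = JWT.encode(payload, key, "RS256")
    decoded, _header = JWT.decode(token, key.public_key, true, algorithm: "RS256")
    decoded["uid"] # => "0000-0001-5489-3594"
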
diff --git a/app/models/concerns/cacheable.rb b/app/models/concerns/cacheable.rb
index 6a1f66c1..3c667e7e 100644
--- a/app/models/concerns/cacheable.rb
+++ b/app/models/concerns/cacheable.rb
@@ -38,7 +38,7 @@ def cached_doi_ra(doi)
         Base.get_doi_ra(prefix)
       end
     end
-  
+
     def cached_crossref_member_id(id)
       Rails.cache.fetch("member_ids/#{id}", expires_in: 1.day) do
         Base.get_crossref_member_id(id)
diff --git a/app/models/concerns/helpable.rb b/app/models/concerns/helpable.rb
index c92fa0d8..a02b1d02 100644
--- a/app/models/concerns/helpable.rb
+++ b/app/models/concerns/helpable.rb
@@ -8,11 +8,10 @@ def set_event_for_bus(event)
       event["obj"] = format_for_bus(event["obj"])
       event
     end
-  
+
     def format_for_bus(metadata)
       { "pid" => metadata["@id"],
         "work_type_id" => metadata["@type"] }.compact
     end
-  
   end
-end
\ No newline at end of file
+end
diff --git a/app/models/concerns/importable.rb b/app/models/concerns/importable.rb
index f2c68d47..5db0d31f 100644
--- a/app/models/concerns/importable.rb
+++ b/app/models/concerns/importable.rb
@@ -4,18 +4,18 @@ module Importable
   included do
     # strong_parameters throws an error, using attributes hash
     def update_record(attributes)
-      if update_attributes(attributes)
-        Rails.logger.debug self.class.name + " " + id + " updated."
+      if update(attributes)
+        Rails.logger.debug "#{self.class.name} #{id} updated."
       else
-        Rails.logger.error self.class.name + " " + id + " not updated: " + errors.to_a.inspect
+        Rails.logger.error "#{self.class.name} #{id} not updated: #{errors.to_a.inspect}"
       end
     end
 
     def delete_record
       if destroy(refresh: true)
-        Rails.logger.debug self.class.name + " record deleted."
+        Rails.logger.debug "#{self.class.name} record deleted."
       else
-        Rails.logger.error self.class.name + " record not deleted: " + errors.to_a.inspect
+        Rails.logger.error "#{self.class.name} record not deleted: #{errors.to_a.inspect}"
       end
     end
   end
@@ -27,7 +27,8 @@ def get_doi_ra(prefix)
       url = "https://doi.org/ra/#{prefix}"
       result = Maremma.get(url)
 
-      return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+      return result.body.fetch("errors") if result.body.fetch("errors",
+                                                              nil).present?
 
       result.body.dig("data", 0, "RA")
     end
@@ -48,7 +49,7 @@ def normalize_doi(doi)
       doi = doi.delete("\u200B").downcase
 
       # turn DOI into URL, escape unsafe characters
-      "https://doi.org/" + Addressable::URI.encode(doi)
+      "https://doi.org/#{Addressable::URI.encode(doi)}"
     end
 
     def normalize_url(id)
@@ -56,7 +57,8 @@ def normalize_url(id)
 
       # check for valid protocol. We support AWS S3 and Google Cloud Storage
       uri = Addressable::URI.parse(id)
-      return nil unless uri&.host && %w(http https ftp s3 gs).include?(uri.scheme)
+      return nil unless uri&.host &&
+                        %w(http https ftp s3 gs).include?(uri.scheme)
 
       # clean up URL
       PostRank::URI.clean(id)
@@ -70,7 +72,7 @@ def normalize_arxiv(id)
       id = id.downcase
 
       # turn arXiv into a URL if needed
-      id = "https://arxiv.org/abs/" + id[6..-1] if id.start_with?("arxiv:")
+      id = "https://arxiv.org/abs/#{id[6..-1]}" if id.start_with?("arxiv:")
 
       # check for valid protocol.
       uri = Addressable::URI.parse(id)
@@ -88,14 +90,14 @@ def normalize_igsn(id)
       id = id.downcase
 
       # turn igsn into a URL if needed
-      id = "https://hdl.handle.net/10273/" + id unless id.start_with?("http")
+      id = "https://hdl.handle.net/10273/#{id}" unless id.start_with?("http")
 
       # check for valid protocol.
       uri = Addressable::URI.parse(id)
       return nil unless uri&.host && %w(http https).include?(uri.scheme)
 
       # don't use IGSN resolver as no support for ssl
-      id = "https://hdl.handle.net/10273/" + id[15..-1] if id.start_with?("http://igsn.org")
+      id = "https://hdl.handle.net/10273/#{id[15..-1]}" if id.start_with?("http://igsn.org")
 
       # clean up URL
       PostRank::URI.clean(id.downcase)
@@ -109,7 +111,7 @@ def normalize_handle(id)
       id = id.downcase
 
       # turn handle into a URL if needed
-      id = "https://hdl.handle.net/" + id unless id.start_with?("http")
+      id = "https://hdl.handle.net/#{id}" unless id.start_with?("http")
 
       # check for valid protocol.
       uri = Addressable::URI.parse(id)
@@ -128,9 +130,9 @@ def normalize_pmid(id)
 
       # strip pmid prefix
       id = id[5..-1] if id.start_with?("pmid:")
-      
+
       # turn handle into a URL if needed
-      id = "https://identifiers.org/pubmed:" + id unless id.start_with?("http")
+      id = "https://identifiers.org/pubmed:#{id}" unless id.start_with?("http")
 
       # check for valid protocol.
       uri = Addressable::URI.parse(id)
@@ -160,7 +162,7 @@ def normalize_orcid(orcid)
       return nil if orcid.blank?
 
       # turn ORCID ID into URL
-      "https://orcid.org/" + Addressable::URI.encode(orcid)
+      "https://orcid.org/#{Addressable::URI.encode(orcid)}"
     end
 
     def validate_ror(ror_id)
@@ -172,11 +174,11 @@ def normalize_ror(ror_id)
       return nil if ror_id.blank?
 
       # turn ROR ID into URL
-      "https://" + Addressable::URI.encode(ror_id)
+      "https://#{Addressable::URI.encode(ror_id)}"
     end
 
     def import_from_api
-      route = self.name.downcase + "s"
+      route = "#{name.downcase}s"
       page_number = 1
       total_pages = 1
       total = 0
@@ -187,11 +189,13 @@ def import_from_api
         url = ENV["API_URL"] + "/#{route}?" + URI.encode_www_form(params)
 
         response = Maremma.get(url, content_type: "application/vnd.api+json")
-        Rails.logger.error response.body["errors"].inspect if response.body.fetch("errors", nil).present?
+        errors = response.body.fetch("errors", nil)
+        Rails.logger.error errors.inspect if errors.present?
 
         records = response.body.fetch("data", [])
         records.each do |data|
-          if self.name == "Client"
+          if name == "Client"
             provider_id = data.dig("relationships", "provider", "data", "id")
             data["attributes"]["provider_id"] = provider_id
           end
@@ -200,7 +204,7 @@ def import_from_api
         end
 
         processed = (page_number - 1) * 100 + records.size
-        Rails.logger.info "#{processed} " + self.name.downcase + "s processed."
+        Rails.logger.info "#{processed} #{name.downcase}s processed."
 
         page_number = response.body.dig("meta", "page").to_i + 1
         total = response.body.dig("meta", "total") || total
@@ -213,7 +217,10 @@ def import_from_api
     def parse_record(sqs_msg: nil, data: nil)
       id = "https://doi.org/#{data['id']}"
       response = get_datacite_json(id)
-      related_identifiers = Array.wrap(response.fetch("relatedIdentifiers", nil)).select { |r| ["DOI", "URL"].include?(r["relatedIdentifierType"]) }
+      related_identifiers = Array.wrap(response.fetch("relatedIdentifiers",
+                                                      nil)).select do |r|
+        ["DOI", "URL"].include?(r["relatedIdentifierType"])
+      end
 
       if related_identifiers.any? { |r| r["relatedIdentifierType"] == "DOI" }
         item = {
@@ -233,7 +240,10 @@ def parse_record(sqs_msg: nil, data: nil)
         RelatedUrl.push_item(item)
       end
 
-      funding_references = Array.wrap(response.fetch("fundingReferences", nil)).select { |f| f.fetch("funderIdentifierType", nil) == "Crossref Funder ID" }
+      funding_references = Array.wrap(response.fetch("fundingReferences",
+                                                     nil)).select do |f|
+        f.fetch("funderIdentifierType", nil) == "Crossref Funder ID"
+      end
       if funding_references.present?
         item = {
           "doi" => data["id"],
@@ -243,7 +253,13 @@ def parse_record(sqs_msg: nil, data: nil)
         FunderIdentifier.push_item(item)
       end
 
-      name_identifiers = Array.wrap(response.fetch("creators", nil)).select { |n| Array.wrap(n.fetch("nameIdentifiers", nil)).any? { |n| n["nameIdentifierScheme"] == "ORCID" } }
+      name_identifiers = Array.wrap(response.fetch("creators",
+                                                   nil)).select do |n|
+        Array.wrap(n.fetch("nameIdentifiers",
+                           nil)).any? do |n|
+          n["nameIdentifierScheme"] == "ORCID"
+        end
+      end
       if name_identifiers.present?
         item = {
           "doi" => data["id"],
@@ -253,7 +269,17 @@ def parse_record(sqs_msg: nil, data: nil)
         NameIdentifier.push_item(item)
       end
 
-      affiliation_identifiers = Array.wrap(response.fetch("creators", nil)).select { |n| Array.wrap(n.fetch("affiliation", nil)).any? { |n| n["affiliationIdentifierScheme"] == "ROR" } && Array.wrap(n.fetch("nameIdentifiers", nil)).any? { |n| n["nameIdentifierScheme"] == "ORCID" } }
+      affiliation_identifiers = Array.wrap(response.fetch("creators",
+                                                          nil)).select do |n|
+        Array.wrap(n.fetch("affiliation",
+                           nil)).any? do |n|
+          n["affiliationIdentifierScheme"] == "ROR"
+        end && Array.wrap(n.fetch(
+                            "nameIdentifiers", nil
+                          )).any? do |n|
+                 n["nameIdentifierScheme"] == "ORCID"
+               end
+      end
       if affiliation_identifiers.present?
         item = {
           "doi" => data["id"],
@@ -263,7 +289,12 @@ def parse_record(sqs_msg: nil, data: nil)
         AffiliationIdentifier.push_item(item)
       end
 
-      orcid_affiliation = Array.wrap(response.fetch("creators", nil)).select { |n| Array.wrap(n.fetch("affiliation", nil)).any? { |n| n["affiliationIdentifierScheme"] == "ROR" } }
+      orcid_affiliation = Array.wrap(response.fetch("creators",
+                                                    nil)).select do |n|
+        Array.wrap(n.fetch("affiliation", nil)).any? do |n|
+          n["affiliationIdentifierScheme"] == "ROR"
+        end
+      end
       if orcid_affiliation.present?
         item = {
           "doi" => data["id"],
@@ -285,7 +316,7 @@ def parse_record(sqs_msg: nil, data: nil)
 
     def create_record(attributes)
       parameters = ActionController::Parameters.new(attributes)
-      self.new(parameters.permit(self.safe_params))
+      new(parameters.permit(safe_params))
     end
 
     def to_kebab_case(hsh)
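
For context on create_record, ActionController::Parameters#permit filters the incoming attributes against safe_params before instantiation; a minimal sketch with made-up attribute names:

    # Hypothetical example, assuming safe_params returns [:name, :symbol]:
    attributes = { "name" => "Example", "symbol" => "EX", "admin" => true }
    parameters = ActionController::Parameters.new(attributes)
    parameters.permit(:name, :symbol).to_h
    # => { "name" => "Example", "symbol" => "EX" }  # "admin" is dropped
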
diff --git a/app/models/concerns/indexable.rb b/app/models/concerns/indexable.rb
index 7bf20aad..c4703bf8 100644
--- a/app/models/concerns/indexable.rb
+++ b/app/models/concerns/indexable.rb
@@ -3,70 +3,78 @@ module Indexable
 
   module ClassMethods
     # don't raise an exception when not found
-    def find_by_id(id, options={})
-      return nil unless id.present?
+    def find_by_id(id, _options = {})
+      return nil if id.blank?
 
       __elasticsearch__.find(id.downcase)
-    rescue Elasticsearch::Transport::Transport::Errors::NotFound, Elasticsearch::Persistence::Repository::DocumentNotFound
+    rescue Elasticsearch::Transport::Transport::Errors::NotFound,
+           Elasticsearch::Persistence::Repository::DocumentNotFound
       nil
     end
 
-    def find_by_ids(ids, options={})
-      options[:sort] ||= { "_doc" => { order: 'asc' }}
+    def find_by_ids(ids, options = {})
+      options[:sort] ||= { "_doc" => { order: "asc" } }
 
       __elasticsearch__.search({
-        from: options[:from] || 0,
-        size: options[:size] || 25,
-        sort: [options[:sort]],
-        query: {
-          ids: {
-            values: ids.split(",").map(&:downcase)
-          }
-        },
-        aggregations: query_aggregations
-      })
+                                 from: options[:from] || 0,
+                                 size: options[:size] || 25,
+                                 sort: [options[:sort]],
+                                 query: {
+                                   ids: {
+                                     values: ids.split(",").map(&:downcase),
+                                   },
+                                 },
+                                 aggregations: query_aggregations,
+                               })
     end
 
-    def query(query, options={})
+    def query(query, options = {})
       __elasticsearch__.search({
-        from: options[:from],
-        size: options[:size],
-        sort: [options[:sort]],
-        query: {
-          bool: {
-            must: {
-              query_string: {
-                query: query + "*",
-                fields: query_fields
-              }
-            },
-            filter: query_filter(options)
-          }
-        },
-        aggregations: query_aggregations
-      })
+                                 from: options[:from],
+                                 size: options[:size],
+                                 sort: [options[:sort]],
+                                 query: {
+                                   bool: {
+                                     must: {
+                                       query_string: {
+                                         query: "#{query}*",
+                                         fields: query_fields,
+                                       },
+                                     },
+                                     filter: query_filter(options),
+                                   },
+                                 },
+                                 aggregations: query_aggregations,
+                               })
     end
 
     def query_fields
-      ['symbol^10', 'name^10', 'contact_name^10', 'contact_email^10', '_all']
+      ["symbol^10", "name^10", "contact_name^10", "contact_email^10", "_all"]
     end
 
     def query_filter(options = {})
-      return nil unless options[:year].present?
+      return nil if options[:year].blank?
 
       {
         terms: {
-          year: options[:year].split(",")
-        }
+          year: options[:year].split(","),
+        },
       }
     end
 
-    def recreate_index(options={})
-      client     = self.gateway.client
+    def recreate_index(options = {})
+      client     = gateway.client
       index_name = self.index_name
 
-      client.indices.delete index: index_name rescue nil if options[:force]
-      client.indices.create index: index_name, body: { settings:  {"index.requests.cache.enable": true }}
+      if options[:force]
+        begin
+          client.indices.delete index: index_name
+        rescue StandardError
+          nil
+        end
+      end
+      client.indices.create(
+        index: index_name,
+        body: { settings: { "index.requests.cache.enable": true } },
+      )
     end
   end
 end
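
A sketch of how the reworked query method is typically called (the model name is illustrative; any class including Indexable works the same way). The interpolated "#{query}*" turns the user input into a prefix-style query_string search across the boosted fields from query_fields:

    # Illustrative only:
    Client.query("datacite", from: 0, size: 25,
                             sort: { created: { order: "asc" } })
    # searches "datacite*" against
    # ["symbol^10", "name^10", "contact_name^10", "contact_email^10", "_all"]
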
diff --git a/app/models/concerns/parserable.rb b/app/models/concerns/parserable.rb
index 357a25b0..faf0d53d 100644
--- a/app/models/concerns/parserable.rb
+++ b/app/models/concerns/parserable.rb
@@ -3,9 +3,10 @@ module Parserable
 
   module ClassMethods
     def correct_checksum?(encoded_report, checksum)
-      #puts checksum
-      #puts Digest::SHA256.hexdigest(Base64.decode64(encoded_report))
+      # puts checksum
+      # puts Digest::SHA256.hexdigest(Base64.decode64(encoded_report))
       return nil if Digest::SHA256.hexdigest(Base64.decode64(encoded_report)) != checksum
+
       true
     end
 
@@ -28,7 +29,7 @@ def parse_subset(json)
     # def report_type?
     #   return "normal" unless @data.dig("report","report-header","exceptions").present?
     #   return "normal" unless @data.dig("report","report-header","exceptions").any?
-    #   exceptions = @data.dig("report","report-header","exceptions") 
+    #   exceptions = @data.dig("report","report-header","exceptions")
     # end
 
     # def subsetted_report?
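
A self-contained sketch of the checksum contract that correct_checksum? enforces: the report arrives Base64-encoded, and the checksum must equal the SHA-256 hex digest of the decoded bytes (the payload below is made up):

    require "base64"
    require "digest"

    report   = '{"report-header":{}}'
    encoded  = Base64.strict_encode64(report)
    checksum = Digest::SHA256.hexdigest(report)

    Digest::SHA256.hexdigest(Base64.decode64(encoded)) == checksum # => true
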
diff --git a/app/models/concerns/searchable.rb b/app/models/concerns/searchable.rb
index 0a0f52d3..304333f9 100644
--- a/app/models/concerns/searchable.rb
+++ b/app/models/concerns/searchable.rb
@@ -8,7 +8,7 @@ def all
       collect_data
     end
 
-    def where(options={})
+    def where(options = {})
       collect_data(options)
     end
 
@@ -17,19 +17,19 @@ def collect_data(options = {})
       parse_data(data, options)
     end
 
-    def get_data(options={})
+    def get_data(options = {})
       query_url = get_query_url(options)
       Maremma.get(query_url, options)
     end
 
-    def parse_items(items, options={})
+    def parse_items(items, options = {})
       Array(items).map do |item|
         parse_item(item, options)
       end
     end
 
-    def parse_item(item, options={})
-      self.new(item, options)
+    def parse_item(item, options = {})
+      new(item, options)
     end
 
     def parse_include(klass, params)
diff --git a/app/models/crossref.rb b/app/models/crossref.rb
index 0b0739fc..5925e917 100644
--- a/app/models/crossref.rb
+++ b/app/models/crossref.rb
@@ -5,7 +5,8 @@ def self.import_by_month(options = {})
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      CrossrefImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      CrossrefImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                             until_date: m.end_of_month.strftime("%F"))
     end
 
     "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -16,7 +17,8 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     crossref = Crossref.new
-    crossref.queue_jobs(crossref.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), host: true))
+    crossref.queue_jobs(crossref.unfreeze(from_date: from_date.strftime("%F"),
+                                          until_date: until_date.strftime("%F"), host: true))
   end
 
   def source_id
@@ -30,9 +32,10 @@ def get_query_url(options = {})
       "until-collected-date" => options[:until_date],
       mailto: "info@datacite.org",
       rows: options[:rows],
-      cursor: options[:cursor] }.compact
+      cursor: options[:cursor],
+    }.compact
 
-    ENV["CROSSREF_QUERY_URL"] + "/v1/events?" + URI.encode_www_form(params)
+    "#{ENV['CROSSREF_QUERY_URL']}/v1/events?#{URI.encode_www_form(params)}"
   end
 
   def get_total(options = {})
@@ -45,13 +48,15 @@ def get_total(options = {})
   def queue_jobs(options = {})
     options[:offset] = options[:offset].to_i || 0
     options[:rows] = options[:rows].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
-    options[:content_type] = 'json'
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
+    options[:content_type] = "json"
 
     total, cursor = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor
       total_pages = (total.to_f / job_batch_size).ceil
       error_total = 0
@@ -67,25 +72,29 @@ def queue_jobs(options = {})
       text = "No DOIs updated #{options[:from_date]} - #{options[:until_date]}."
     end
 
-    Rails.logger.info "[Event Data] " + text
+    Rails.logger.info "[Event Data] #{text}"
 
     # send slack notification
-    if total == 0
-      options[:level] = "warning"
-    elsif error_total > 0
-      options[:level] = "danger"
-    else
-      options[:level] = "good"
-    end
+    options[:level] = if total.zero?
+                        "warning"
+                      elsif error_total.positive?
+                        "danger"
+                      else
+                        "good"
+                      end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text, options)
+    end
 
     # return number of works queued
     total
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "message", "events")
     # Rails.logger.info "Extracting related identifiers for #{items.size} DOIs updated from #{options[:from_date]} until #{options[:until_date]}."
@@ -119,7 +128,10 @@ def self.push_item(item)
             "timestamp" => item["timestamp"],
             "license" => item["license"],
             "subj" => subj,
-            "obj" => obj } }}
+            "obj" => obj,
+          },
+        },
+      }
 
       response = Maremma.put(push_url, data: data.to_json,
                                        bearer: ENV["STAFF_ADMIN_TOKEN"],
diff --git a/app/models/crossref_funder.rb b/app/models/crossref_funder.rb
index 2b69acc5..38c3f222 100644
--- a/app/models/crossref_funder.rb
+++ b/app/models/crossref_funder.rb
@@ -1,5 +1,5 @@
 class CrossrefFunder < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
@@ -7,7 +7,8 @@ def self.import_by_month(options = {})
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      CrossrefFunderImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      CrossrefFunderImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                   until_date: m.end_of_month.strftime("%F"))
     end
 
     "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -18,7 +19,9 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     crossref_funder = CrossrefFunder.new
-    crossref_funder.queue_jobs(crossref_funder.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), host: true))
+    crossref_funder.queue_jobs(crossref_funder.unfreeze(
+      from_date: from_date.strftime("%F"),
+      until_date: until_date.strftime("%F"),
+      host: true,
+    ))
   end
 
   def source_id
@@ -30,10 +33,10 @@ def get_query_url(options = {})
       filter: "has-funder:true,from-created-date:#{options[:from_date]},until-created-date:#{options[:until_date]}",
       mailto: "info@datacite.org",
       rows: options[:rows],
-      cursor: options[:cursor] 
+      cursor: options[:cursor],
     }.compact
 
-    "https://api.crossref.org/works?" + URI.encode_www_form(params)
+    "https://api.crossref.org/works?#{URI.encode_www_form(params)}"
   end
 
   def get_total(options = {})
@@ -45,18 +48,20 @@ def get_total(options = {})
 
   def queue_jobs(options = {})
     options[:rows] = options[:rows].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
 
     total = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor, unless test environment
       total_pages = Rails.env.test? ? 1 : (total.to_f / job_batch_size).ceil
       error_total = 0
       cursor = "*"
 
-      (0...total_pages).each do |page|
+      (0...total_pages).each do |_page|
         options[:total] = total
         options[:cursor] = cursor
         count, cursor = process_data(options)
@@ -66,52 +71,55 @@ def queue_jobs(options = {})
       text = "No DOIs created #{options[:from_date]} - #{options[:until_date]}."
     end
 
-    Rails.logger.info "[Event Data] " + text
+    Rails.logger.info "[Event Data] #{text}"
 
     # send slack notification
-    if total == 0
-      options[:level] = "warning"
-    elsif error_total > 0
-      options[:level] = "danger"
-    else
-      options[:level] = "good"
-    end
+    options[:level] = if total.zero?
+                        "warning"
+                      elsif error_total.positive?
+                        "danger"
+                      else
+                        "good"
+                      end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text, options)
+    end
 
     # return number of works queued
     total
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "message", "items")
 
     Array.wrap(items).map do |item|
-      begin
-        CrossrefFunderImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      CrossrefFunderImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge,
+           Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     [items.length, result.body.dig("data", "message", "next-cursor")]
   end
-  
+
   def self.push_item(item)
     doi = item.fetch("DOI", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    
+
     funders = item.fetch("funder", []).select { |a| a["DOI"].present? }
     return nil if funders.blank?
 
     source_id = item.fetch("sourceId", "crossref_funder")
     relation_type_id = "is_funded_by"
-    source_token = ENV['CROSSREF_FUNDER_SOURCE_TOKEN']
-    
+    source_token = ENV["CROSSREF_FUNDER_SOURCE_TOKEN"]
+
     push_items = Array.wrap(funders).reduce([]) do |ssum, iitem|
       funder_identifier = iitem.fetch("DOI", nil)
       obj_id = normalize_doi(funder_identifier)
@@ -132,17 +140,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # send to DataCite Event Data API
     # don't send to Profiles service for ORCID claiming
     Array.wrap(push_items).each do |iiitem|
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -156,12 +164,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
@@ -177,4 +188,3 @@ def self.push_item(item)
     push_items.length
   end
 end
-  
\ No newline at end of file
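
For reference, get_query_url percent-encodes the Crossref filter via URI.encode_www_form; a runnable illustration with assumed dates and row count:

    require "uri"

    params = {
      filter: "has-funder:true,from-created-date:2021-01-01,until-created-date:2021-01-31",
      mailto: "info@datacite.org",
      rows: 1000,
      cursor: "*",
    }.compact
    "https://api.crossref.org/works?#{URI.encode_www_form(params)}"
    # => "https://api.crossref.org/works?filter=has-funder%3Atrue%2Cfrom-created-date%3A2021-01-01...&cursor=*"
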
diff --git a/app/models/crossref_import.rb b/app/models/crossref_import.rb
index 901eb3ae..4aabc587 100644
--- a/app/models/crossref_import.rb
+++ b/app/models/crossref_import.rb
@@ -1,5 +1,5 @@
 class CrossrefImport < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
@@ -7,7 +7,8 @@ def self.import_by_month(options = {})
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      CrossrefImportImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      CrossrefImportImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                   until_date: m.end_of_month.strftime("%F"))
     end
 
     "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -18,7 +19,9 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     crossref_import = CrossrefImport.new
-    crossref_import.queue_jobs(crossref_import.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), host: true))
+    crossref_import.queue_jobs(crossref_import.unfreeze(
+      from_date: from_date.strftime("%F"),
+      until_date: until_date.strftime("%F"),
+      host: true,
+    ))
   end
 
   def source_id
@@ -30,10 +33,10 @@ def get_query_url(options = {})
       filter: "from-created-date:#{options[:from_date]},until-created-date:#{options[:until_date]}",
       mailto: "info@datacite.org",
       rows: options[:rows],
-      cursor: options[:cursor] 
+      cursor: options[:cursor],
     }.compact
 
-    "https://api.crossref.org/works?" + URI.encode_www_form(params)
+    "https://api.crossref.org/works?#{URI.encode_www_form(params)}"
   end
 
   def get_total(options = {})
@@ -45,18 +48,20 @@ def get_total(options = {})
 
   def queue_jobs(options = {})
     options[:rows] = options[:rows].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
 
     total = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor, unless test environment
       total_pages = Rails.env.test? ? 1 : (total.to_f / job_batch_size).ceil
       error_total = 0
       cursor = "*"
 
-      (0...total_pages).each do |page|
+      (0...total_pages).each do |_page|
         options[:total] = total
         options[:cursor] = cursor
         count, cursor = process_data(options)
@@ -66,42 +71,45 @@ def queue_jobs(options = {})
       text = "No DOIs created #{options[:from_date]} - #{options[:until_date]}."
     end
 
-    Rails.logger.info "[Event Data] " + text
+    Rails.logger.info "[Event Data] #{text}"
 
     # send slack notification
-    if total == 0
-      options[:level] = "warning"
-    elsif error_total > 0
-      options[:level] = "danger"
-    else
-      options[:level] = "good"
-    end
+    options[:level] = if total.zero?
+                        "warning"
+                      elsif error_total.positive?
+                        "danger"
+                      else
+                        "good"
+                      end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text, options)
+    end
 
     # return number of works queued
     total
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "message", "items")
 
     Array.wrap(items).map do |item|
-      begin
-        CrossrefRelatedImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      CrossrefRelatedImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge,
+           Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     [items.length, result.body.dig("data", "message", "next-cursor")]
   end
-  
+
   def self.push_item(item)
     doi = item.fetch("DOI", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
 
@@ -117,10 +125,10 @@ def self.push_item(item)
 
     # don't send to Event Data Bus
     Array.wrap(push_items).each do |iiitem|
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "id" => iiitem["id"],
@@ -135,13 +143,16 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
-                                
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
+
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
         elsif response.status == 409
@@ -157,7 +168,10 @@ def self.push_item(item)
   end
 
   def self.push_related_items(item:, pid:)
-    related_doi_identifiers = Array.wrap(item.fetch("reference", nil)).select { |r| r["DOI"].present? }
+    related_doi_identifiers = Array.wrap(item.fetch("reference",
+                                                    nil)).select do |r|
+      r["DOI"].present?
+    end
     return [] if related_doi_identifiers.blank?
 
     registration_agencies = {}
@@ -167,27 +181,30 @@ def self.push_related_items(item:, pid:)
       related_identifier = iitem.fetch("DOI", nil).to_s.strip.downcase
       obj_id = normalize_doi(related_identifier)
       prefix = validate_prefix(related_identifier)
-      registration_agencies[prefix] = cached_doi_ra(related_identifier) unless registration_agencies[prefix]
+      registration_agencies[prefix] ||= cached_doi_ra(related_identifier)
 
       if registration_agencies[prefix].nil?
         Rails.logger.error "No DOI registration agency for prefix #{prefix} found."
         source_id = "crossref_related"
-        source_token = ENV['CROSSREF_RELATED_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_RELATED_SOURCE_TOKEN"]
         obj = {}
       elsif registration_agencies[prefix] == "Crossref"
         source_id = "crossref_related"
-        source_token = ENV['CROSSREF_RELATED_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_RELATED_SOURCE_TOKEN"]
         obj = cached_crossref_response(obj_id)
       elsif registration_agencies[prefix] == "DataCite"
         source_id = "crossref_datacite"
-        source_token = ENV['CROSSREF_DATACITE_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_DATACITE_SOURCE_TOKEN"]
         obj = cached_datacite_response(obj_id)
       elsif registration_agencies[prefix].present?
         source_id = "crossref_#{registration_agencies[prefix].downcase}"
-        source_token = ENV['CROSSREF_OTHER_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_OTHER_SOURCE_TOKEN"]
         obj = {}
       end
-    
+
       if registration_agencies[prefix].present? && obj_id.present?
         subj = cached_datacite_response(pid)
 
@@ -204,7 +221,7 @@ def self.push_related_items(item:, pid:)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
   end
@@ -215,7 +232,7 @@ def self.push_orcid_items(item:, pid:)
 
     source_id = "crossref_orcid_auto_update"
     relation_type_id = "is_authored_by"
-    source_token = ENV['CROSSREF_ORCID_AUTO_UPDATE_SOURCE_TOKEN']
+    source_token = ENV["CROSSREF_ORCID_AUTO_UPDATE_SOURCE_TOKEN"]
 
     Array.wrap(creators).reduce([]) do |ssum, iitem|
       name_identifier = iitem.fetch("ORCID", nil)
@@ -237,7 +254,7 @@ def self.push_orcid_items(item:, pid:)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
   end
@@ -248,8 +265,8 @@ def self.push_funding_items(item:, pid:)
 
     source_id = "crossref_funder"
     relation_type_id = "is_funded_by"
-    source_token = ENV['CROSSREF_FUNDER_SOURCE_TOKEN']
-    
+    source_token = ENV["CROSSREF_FUNDER_SOURCE_TOKEN"]
+
     Array.wrap(funders).reduce([]) do |ssum, iitem|
       funder_identifier = iitem.fetch("DOI", nil)
       obj_id = normalize_doi(funder_identifier)
@@ -270,28 +287,27 @@ def self.push_funding_items(item:, pid:)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
   end
 
   def self.push_import_item(item:, pid:)
     source_id = "crossref_import"
-    source_token = ENV['CROSSREF_IMPORT_SOURCE_TOKEN']
+    source_token = ENV["CROSSREF_IMPORT_SOURCE_TOKEN"]
     subj = cached_crossref_response(pid)
 
     [{ "message_action" => "create",
-      "id" => SecureRandom.uuid,
-      "subj_id" => pid,
-      "obj_id" => nil,
-      "relation_type_id" => nil,
-      "source_id" => source_id,
-      "source_token" => source_token,
-      "occurred_at" => item.dig("created", "date-time"),
-      "timestamp" => Time.zone.now.iso8601,
-      "license" => LICENSE,
-      "subj" => subj,
-      "obj" => {} }]
+       "id" => SecureRandom.uuid,
+       "subj_id" => pid,
+       "obj_id" => nil,
+       "relation_type_id" => nil,
+       "source_id" => source_id,
+       "source_token" => source_token,
+       "occurred_at" => item.dig("created", "date-time"),
+       "timestamp" => Time.zone.now.iso8601,
+       "license" => LICENSE,
+       "subj" => subj,
+       "obj" => {} }]
   end
 end
-  
\ No newline at end of file
diff --git a/app/models/crossref_orcid.rb b/app/models/crossref_orcid.rb
index 51236c2a..bcf2c0ba 100644
--- a/app/models/crossref_orcid.rb
+++ b/app/models/crossref_orcid.rb
@@ -1,5 +1,5 @@
 class CrossrefOrcid < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
@@ -7,7 +7,8 @@ def self.import_by_month(options = {})
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      CrossrefOrcidImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      CrossrefOrcidImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                  until_date: m.end_of_month.strftime("%F"))
     end
 
     "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -18,7 +19,9 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     crossref_orcid = CrossrefOrcid.new
-    crossref_orcid.queue_jobs(crossref_orcid.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), host: true))
+    crossref_orcid.queue_jobs(crossref_orcid.unfreeze(
+      from_date: from_date.strftime("%F"),
+      until_date: until_date.strftime("%F"),
+      host: true,
+    ))
   end
 
   def source_id
@@ -30,10 +33,10 @@ def get_query_url(options = {})
       filter: "has-orcid:true,from-created-date:#{options[:from_date]},until-created-date:#{options[:until_date]}",
       mailto: "info@datacite.org",
       rows: options[:rows],
-      cursor: options[:cursor] 
+      cursor: options[:cursor],
     }.compact
 
-    "https://api.crossref.org/works?" + URI.encode_www_form(params)
+    "https://api.crossref.org/works?#{URI.encode_www_form(params)}"
   end
 
   def get_total(options = {})
@@ -45,18 +48,20 @@ def get_total(options = {})
 
   def queue_jobs(options = {})
     options[:rows] = options[:rows].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
 
     total = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor, unless test environment
       total_pages = Rails.env.test? ? 1 : (total.to_f / job_batch_size).ceil
       error_total = 0
       cursor = "*"
 
-      (0...total_pages).each do |page|
+      (0...total_pages).each do |_page|
         options[:total] = total
         options[:cursor] = cursor
         count, cursor = process_data(options)
@@ -66,52 +71,55 @@ def queue_jobs(options = {})
       text = "No DOIs created #{options[:from_date]} - #{options[:until_date]}."
     end
 
-    Rails.logger.info "[Event Data] " + text
+    Rails.logger.info "[Event Data] #{text}"
 
     # send slack notification
-    if total == 0
-      options[:level] = "warning"
-    elsif error_total > 0
-      options[:level] = "danger"
-    else
-      options[:level] = "good"
-    end
+    options[:level] = if total.zero?
+                        "warning"
+                      elsif error_total.positive?
+                        "danger"
+                      else
+                        "good"
+                      end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text, options)
+    end
 
     # return number of works queued
     total
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "message", "items")
 
     Array.wrap(items).map do |item|
-      begin
-        CrossrefOrcidImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      CrossrefOrcidImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge,
+           Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     [items.length, result.body.dig("data", "message", "next-cursor")]
   end
-  
+
   def self.push_item(item)
     doi = item.fetch("DOI", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    
+
     creators = item.fetch("author", []).select { |a| a["ORCID"].present? }
     return nil if creators.blank?
 
     source_id = item.fetch("sourceId", "crossref_orcid_auto_update")
     relation_type_id = "is_authored_by"
-    source_token = ENV['CROSSREF_ORCID_AUTO_UPDATE_SOURCE_TOKEN']
-    
+    source_token = ENV["CROSSREF_ORCID_AUTO_UPDATE_SOURCE_TOKEN"]
+
     push_items = Array.wrap(creators).reduce([]) do |ssum, iitem|
       name_identifier = iitem.fetch("ORCID", nil)
       obj_id = normalize_orcid(name_identifier)
@@ -132,17 +140,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # send to DataCite Event Data API
     # don't send to Profiles service for ORCID claiming
     Array.wrap(push_items).each do |iiitem|
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -156,12 +164,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
@@ -177,4 +188,3 @@ def self.push_item(item)
     push_items.length
   end
 end
-  
\ No newline at end of file
diff --git a/app/models/crossref_related.rb b/app/models/crossref_related.rb
index 9f089168..44e1664a 100644
--- a/app/models/crossref_related.rb
+++ b/app/models/crossref_related.rb
@@ -1,5 +1,5 @@
 class CrossrefRelated < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
@@ -7,7 +7,9 @@ def self.import_by_month(options = {})
 
     # get first day of every month between from_date and until_date
     (from_date..until_date).select { |d| d.day == 1 }.each do |m|
-      CrossrefRelatedImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+      CrossrefRelatedImportByMonthJob.perform_later(
+        from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"),
+      )
     end
 
     "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
@@ -18,7 +20,9 @@ def self.import(options = {})
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     crossref_related = CrossrefRelated.new
-    crossref_related.queue_jobs(crossref_related.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), host: true))
+    crossref_related.queue_jobs(crossref_related.unfreeze(
+      from_date: from_date.strftime("%F"),
+      until_date: until_date.strftime("%F"),
+      host: true,
+    ))
   end
 
   def source_id
@@ -30,10 +34,10 @@ def get_query_url(options = {})
       filter: "reference-visibility:open,has-references:true,from-created-date:#{options[:from_date]},until-created-date:#{options[:until_date]}",
       mailto: "info@datacite.org",
       rows: options[:rows],
-      cursor: options[:cursor] 
+      cursor: options[:cursor],
     }.compact
 
-    "https://api.crossref.org/works?" + URI.encode_www_form(params)
+    "https://api.crossref.org/works?#{URI.encode_www_form(params)}"
   end
 
   def get_total(options = {})
@@ -45,18 +49,20 @@ def get_total(options = {})
 
   def queue_jobs(options = {})
     options[:rows] = options[:rows].presence || job_batch_size
-    options[:from_date] = options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
-    options[:until_date] = options[:until_date].presence || Time.now.to_date.iso8601
+    options[:from_date] =
+      options[:from_date].presence || (Time.now.to_date - 1.day).iso8601
+    options[:until_date] =
+      options[:until_date].presence || Time.now.to_date.iso8601
 
     total = get_total(options)
 
-    if total > 0
+    if total.positive?
       # walk through results paginated via cursor, unless test environment
       total_pages = Rails.env.test? ? 1 : (total.to_f / job_batch_size).ceil
       error_total = 0
       cursor = "*"
 
-      (0...total_pages).each do |page|
+      (0...total_pages).each do |_page|
         options[:total] = total
         options[:cursor] = cursor
         count, cursor = process_data(options)
@@ -66,45 +72,51 @@ def queue_jobs(options = {})
       text = "No DOIs created #{options[:from_date]} - #{options[:until_date]}."
     end
 
-    Rails.logger.info "[Event Data] " + text
+    Rails.logger.info "[Event Data] #{text}"
 
     # send slack notification
-    if total == 0
-      options[:level] = "warning"
-    elsif error_total > 0
-      options[:level] = "danger"
-    else
-      options[:level] = "good"
-    end
+    options[:level] = if total.zero?
+                        "warning"
+                      elsif error_total.positive?
+                        "danger"
+                      else
+                        "good"
+                      end
     options[:title] = "Report for #{source_id}"
-    send_notification_to_slack(text, options) if options[:slack_webhook_url].present?
+    if options[:slack_webhook_url].present?
+      send_notification_to_slack(text, options)
+    end
 
     # return number of works queued
     total
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "message", "items")
 
     Array.wrap(items).map do |item|
-      begin
-        CrossrefRelatedImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      CrossrefRelatedImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge,
+           Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     [items.length, result.body.dig("data", "message", "next-cursor")]
   end
-  
+
   def self.push_item(item)
     doi = item.fetch("DOI", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_doi_identifiers = Array.wrap(item.fetch("reference", nil)).select { |r| r["DOI"].present? }
+    related_doi_identifiers = Array.wrap(item.fetch("reference",
+                                                    nil)).select do |r|
+      r["DOI"].present?
+    end
     registration_agencies = {}
     relation_type_id = "cites"
 
@@ -112,27 +124,30 @@ def self.push_item(item)
       related_identifier = iitem.fetch("DOI", nil).to_s.strip.downcase
       obj_id = normalize_doi(related_identifier)
       prefix = validate_prefix(related_identifier)
-      registration_agencies[prefix] = cached_doi_ra(related_identifier) unless registration_agencies[prefix]
+      registration_agencies[prefix] ||= cached_doi_ra(related_identifier)
 
       if registration_agencies[prefix].nil?
         Rails.logger.error "No DOI registration agency for prefix #{prefix} found."
         source_id = "crossref_related"
-        source_token = ENV['CROSSREF_RELATED_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_RELATED_SOURCE_TOKEN"]
         obj = {}
       elsif registration_agencies[prefix] == "Crossref"
         source_id = "crossref_related"
-        source_token = ENV['CROSSREF_RELATED_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_RELATED_SOURCE_TOKEN"]
         obj = cached_crossref_response(obj_id)
       elsif registration_agencies[prefix] == "DataCite"
         source_id = "crossref_datacite"
-        source_token = ENV['CROSSREF_DATACITE_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_DATACITE_SOURCE_TOKEN"]
         obj = cached_datacite_response(obj_id)
       elsif registration_agencies[prefix].present?
         source_id = "crossref_#{registration_agencies[prefix].downcase}"
-        source_token = ENV['CROSSREF_OTHER_SOURCE_TOKEN']
+        source_token = ENV["CROSSREF_OTHER_SOURCE_TOKEN"]
         obj = {}
       end
-    
+
       if registration_agencies[prefix].present? && obj_id.present?
         subj = cached_datacite_response(pid)
 
@@ -149,7 +164,7 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
@@ -157,10 +172,10 @@ def self.push_item(item)
     # send to DataCite Event Data Query API
     # don't send to Event Data Bus
     Array.wrap(push_items).each do |iiitem|
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "id" => iiitem["id"],
@@ -175,13 +190,16 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
-                                
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
+
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
         elsif response.status == 409
@@ -196,4 +214,3 @@ def self.push_item(item)
     push_items.length
   end
 end
-  
\ No newline at end of file
diff --git a/app/models/doi.rb b/app/models/doi.rb
index 15f8105c..8a818bc9 100644
--- a/app/models/doi.rb
+++ b/app/models/doi.rb
@@ -2,7 +2,7 @@ class Doi < Base
   include Searchable
   include Indexable
 
-  def self.get_query_url(options={})
+  def self.get_query_url(options = {})
     if options[:id].present?
       "#{url}/#{options[:id]}"
     else
@@ -12,16 +12,16 @@ def self.get_query_url(options={})
                  year: options.fetch(:year, nil),
                  "page[size]" => options.dig(:page, :size),
                  "page[number]" => options.dig(:page, :number) }.compact
-      url + "?" + URI.encode_www_form(params)
+      "#{url}?#{URI.encode_www_form(params)}"
     end
   end
 
-  def self.parse_data(result, options={})
-    return nil if result.blank? || result['errors']
+  def self.parse_data(result, options = {})
+    return nil if result.blank? || result["errors"]
 
     if options[:id].present?
       item = result.body.fetch("data", {})
-      return nil unless item.present?
+      return nil if item.blank?
 
       { data: parse_item(item) }
     else
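
For illustration, the page parameters in Doi.get_query_url compact away when absent, and the bracketed keys are form-encoded (`url` is the API base defined elsewhere in the class):

    require "uri"

    params = { query: "phytoplankton",
               "page[size]" => 25,
               "page[number]" => 2 }.compact
    URI.encode_www_form(params)
    # => "query=phytoplankton&page%5Bsize%5D=25&page%5Bnumber%5D=2"
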
diff --git a/app/models/funder_identifier.rb b/app/models/funder_identifier.rb
index a022cf52..2c3343a9 100644
--- a/app/models/funder_identifier.rb
+++ b/app/models/funder_identifier.rb
@@ -1,24 +1,28 @@
 class FunderIdentifier < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      FunderIdentifierImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      FunderIdentifierImportByMonthJob.perform_later(
+        from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"),
+      )
     end
 
-    "Queued import for DOIs created from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     funder_identifier = FunderIdentifier.new
-    funder_identifier.queue_jobs(funder_identifier.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    funder_identifier.queue_jobs(funder_identifier.unfreeze(
+      from_date: from_date.strftime("%F"),
+      until_date: until_date.strftime("%F"),
+    ))
   end
 
   def source_id
@@ -29,18 +33,18 @@ def query
     "fundingReferences.funderIdentifierType:\"Crossref Funder ID\""
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
     # Rails.logger.info "Extracting funder identifiers for #{items.size} DOIs updated from #{options[:from_date]} until #{options[:until_date]}."
 
     Array.wrap(items).map do |item|
-      begin
-        FunderIdentifierImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      FunderIdentifierImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge,
+           Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     items.length
@@ -49,18 +53,22 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    funder_identifiers = Array.wrap(attributes.fetch('fundingReferences', nil)).select { |f| f["funderIdentifierType"] == "Crossref Funder ID" }
+    funder_identifiers = Array.wrap(attributes.fetch("fundingReferences",
+                                                     nil)).select do |f|
+      f["funderIdentifierType"] == "Crossref Funder ID"
+    end
 
     push_items = Array.wrap(funder_identifiers).reduce([]) do |ssum, iitem|
-      funder_identifier = iitem.fetch("funderIdentifier", nil).to_s.strip.downcase
+      funder_identifier = iitem.fetch("funderIdentifier",
+                                      nil).to_s.strip.downcase
       obj_id = normalize_doi(funder_identifier)
-      
+
       relation_type_id = "is_funded_by"
       source_id = "datacite_funder"
-      source_token = ENV['DATACITE_FUNDER_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_FUNDER_SOURCE_TOKEN"]
 
       if funder_identifier.present? && obj_id.present?
         subj = cached_datacite_response(pid)
@@ -78,17 +86,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # there can be one or more funder_identifier per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -102,12 +110,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
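+        # JSON:API envelope (application/vnd.api+json): a single "events"
+        # resource whose attributes mirror the iiitem hash built above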
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
@@ -129,24 +140,23 @@ def self.get_funder_metadata(id)
     response = Maremma.get(url, host: true)
 
     return {} if response.status != 200
-    
+
     message = response.body.dig("data", "message")
-    
-    if message["location"].present?
-      location = { 
-        "type" => "postalAddress",
-        "addressCountry" => message["location"]
-      }
-    else
-      location = nil
-    end
-    
+
+    location = if message["location"].present?
+                 {
+                   "type" => "postalAddress",
+                   "addressCountry" => message["location"],
+                 }
+               end
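+    # location stays nil when the funder record has no "location" value;
+    # the .compact below then drops the "location" key from the result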
+
     {
       "@id" => id,
       "@type" => "Funder",
       "name" => message["name"],
       "alternateName" => message["alt-names"],
       "location" => location,
-      "dateModified" => "2018-07-11T00:00:00Z" }.compact
+      "dateModified" => "2018-07-11T00:00:00Z",
+    }.compact
   end
 end
diff --git a/app/models/heartbeat.rb b/app/models/heartbeat.rb
index fa3238bd..d9706da3 100644
--- a/app/models/heartbeat.rb
+++ b/app/models/heartbeat.rb
@@ -17,7 +17,7 @@ def memcached_up?
     memcached_client = Dalli::Client.new
     memcached_client.alive!
     true
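+  # NOTE: a bare rescue already defaults to StandardError, so the explicit
+  # class only satisfies Style/RescueStandardError; behavior is unchanged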
-  rescue
+  rescue StandardError
     false
   end
 end
diff --git a/app/models/name_identifier.rb b/app/models/name_identifier.rb
index 53ace768..b166d690 100644
--- a/app/models/name_identifier.rb
+++ b/app/models/name_identifier.rb
@@ -1,5 +1,5 @@
 class NameIdentifier < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
@@ -33,7 +33,8 @@ def self.import_one(options = {})
     end
 
     attributes = get_datacite_json(doi)
-    response = push_item({ "id" => doi, "type" => "dois", "attributes" => attributes })
+    push_item({ "id" => doi, "type" => "dois",
+                "attributes" => attributes })
   end
 
   def source_id
@@ -45,7 +46,8 @@ def query
   end
 
   def push_data(result, _options = {})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
 
@@ -62,10 +64,11 @@ def push_data(result, _options = {})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers", nil))
+    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                      nil))
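+    # records that are identical to, part of, or a version of another DOI
+    # are flagged here and skipped, presumably to avoid duplicate claims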
     skip_doi = related_identifiers.any? do |related_identifier|
       ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf",
        "IsVersionOf"].include?(related_identifier["relatedIdentifierType"])
@@ -82,7 +85,8 @@ def self.push_item(item)
     source_token = ENV["DATACITE_ORCID_AUTO_UPDATE_SOURCE_TOKEN"]
 
     push_items = Array.wrap(creators).reduce([]) do |ssum, iitem|
-      name_identifier = Array.wrap(iitem.fetch("nameIdentifiers", nil)).find do |n|
+      name_identifier = Array.wrap(iitem.fetch("nameIdentifiers",
+                                               nil)).detect do |n|
         n["nameIdentifierScheme"] == "ORCID"
       end
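+      # name_identifier is nil when the creator has no ORCID entry, so the
+      # normalize_orcid call below is guarded with present?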
       obj_id = normalize_orcid(name_identifier["nameIdentifier"]) if name_identifier.present?
@@ -111,7 +115,7 @@ def self.push_item(item)
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data API
       if ENV["STAFF_ADMIN_TOKEN"].present?
-        push_url = ENV["LAGOTTINO_URL"] + "/events"
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
         data = {
           "data" => {
@@ -149,7 +153,7 @@ def self.push_item(item)
 
       # send to Profiles service, which then pushes to ORCID
       if ENV["STAFF_ADMIN_TOKEN"].present?
-        push_url = ENV["VOLPINO_URL"] + "/claims"
+        push_url = "#{ENV['VOLPINO_URL']}/claims"
         doi = doi_from_url(iiitem["subj_id"])
         orcid = orcid_from_url(iiitem["obj_id"])
         source_id = iiitem["source_id"] == "datacite_orcid_auto_update" ? "orcid_update" : "orcid_search"
diff --git a/app/models/orcid_affiliation.rb b/app/models/orcid_affiliation.rb
index 24753914..51c86d1c 100644
--- a/app/models/orcid_affiliation.rb
+++ b/app/models/orcid_affiliation.rb
@@ -1,24 +1,28 @@
 class OrcidAffiliation < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      OrcidAffiliationImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      OrcidAffiliationImportByMonthJob.perform_later(
+        from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"),
+      )
     end
 
-    "Queued import for DOIs created from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     orcid_affiliation = OrcidAffiliation.new
-    orcid_affiliation.queue_jobs(orcid_affiliation.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    orcid_affiliation.queue_jobs(orcid_affiliation.unfreeze(
+                                   from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                                 ))
   end
 
   def source_id
@@ -29,17 +33,17 @@ def query
     "creators.nameIdentifiers.nameIdentifierScheme:ORCID +creators.affiliation.affiliationIdentifierScheme:ROR"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
 
     Array.wrap(items).map do |item|
-      begin
-        OrcidAffiliationImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      OrcidAffiliationImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     items.length
@@ -47,18 +51,23 @@ def push_data(result, options={})
 
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
-    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers", nil))
+    related_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                      nil))
     skip_doi = related_identifiers.any? do |related_identifier|
-      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf", "IsVersionOf"].include?(related_identifier["relatedIdentifierType"])
+      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf",
+       "IsVersionOf"].include?(related_identifier["relatedIdentifierType"])
     end
 
     total_push_items = []
 
     attributes.fetch("creators", []).map do |creator|
-      name_identifier = Array.wrap(creator.fetch("nameIdentifiers", nil)).find { |n| n["nameIdentifierScheme"] == "ORCID" }
+      name_identifier = Array.wrap(creator.fetch("nameIdentifiers",
+                                                 nil)).detect do |n|
+        n["nameIdentifierScheme"] == "ORCID"
+      end
       skip_orcid = name_identifier.blank?
 
-      affiliation_identifiers = Array.wrap(creator).reduce([]) do |sum, c| 
+      affiliation_identifiers = Array.wrap(creator).reduce([]) do |sum, c|
         Array.wrap(c["affiliation"]).each do |a|
           sum << a["affiliationIdentifier"] if a["affiliationIdentifierScheme"] == "ROR"
         end
@@ -71,8 +80,8 @@ def self.push_item(item)
       subj_id = normalize_orcid(name_identifier["nameIdentifier"])
       source_id = item.fetch("sourceId", "orcid_affiliation")
       relation_type_id = "is_affiliated_with"
-      source_token = ENV['ORCID_AFFILIATION_SOURCE_TOKEN']
-      
+      source_token = ENV["ORCID_AFFILIATION_SOURCE_TOKEN"]
+
       push_items = Array.wrap(affiliation_identifiers).reduce([]) do |ssum, iitem|
         obj_id = normalize_ror(iitem)
 
@@ -92,17 +101,17 @@ def self.push_item(item)
                     "subj" => subj,
                     "obj" => obj }
         end
-        
+
         ssum
       end
 
       # there can be one or more affiliation_identifier per DOI
       Array.wrap(push_items).each do |iiitem|
         # send to DataCite Event Data API
-        if ENV['STAFF_ADMIN_TOKEN'].present?
-          push_url = ENV['LAGOTTINO_URL'] + "/events"
+        if ENV["STAFF_ADMIN_TOKEN"].present?
+          push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-          data = { 
+          data = {
             "data" => {
               "type" => "events",
               "attributes" => {
@@ -116,12 +125,15 @@ def self.push_item(item)
                 "timestamp" => iiitem["timestamp"],
                 "license" => iiitem["license"],
                 "subj" => iiitem["subj"],
-                "obj" => iiitem["obj"] } }}
+                "obj" => iiitem["obj"],
+              },
+            },
+          }
 
           response = Maremma.post(push_url, data: data.to_json,
-                                            bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                            content_type: 'application/vnd.api+json',
-                                            accept: 'application/vnd.api+json; version=2')
+                                            bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                            content_type: "application/vnd.api+json",
+                                            accept: "application/vnd.api+json; version=2")
 
           if [200, 201].include?(response.status)
             Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
@@ -141,23 +153,24 @@ def self.push_item(item)
   end
 
   def self.get_ror_metadata(id)
-    return {} unless id.present?
+    return {} if id.blank?
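+    # id is a full ROR URL, e.g. "https://ror.org/04xfq0f34" (sample value);
+    # the 8-character "https://" scheme is stripped when building the path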
 
-    url = "https://api.ror.org/organizations/" + id[8..-1]
+    url = "https://api.ror.org/organizations/#{id[8..-1]}"
     response = Maremma.get(url, host: true)
     return {} if response.status != 200
 
     message = response.body.fetch("data", {})
-    
-    location = { 
+
+    location = {
       "type" => "postalAddress",
-      "addressCountry" => message.dig("country", "country_name")
+      "addressCountry" => message.dig("country", "country_name"),
     }
-    
+
     {
       "@id" => id,
       "@type" => "Organization",
       "name" => message["name"],
-      "location" => location }.compact
+      "location" => location,
+    }.compact
   end
 end
diff --git a/app/models/orcid_claim.rb b/app/models/orcid_claim.rb
index 8ae9f742..b7da37bb 100644
--- a/app/models/orcid_claim.rb
+++ b/app/models/orcid_claim.rb
@@ -1,41 +1,44 @@
 class OrcidClaim < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      OrcidClaimImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      OrcidClaimImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                               until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for claims created from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for claims created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     orcid_claim = OrcidClaim.new
-    orcid_claim.queue_jobs(orcid_claim.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    orcid_claim.queue_jobs(orcid_claim.unfreeze(
+                             from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                           ))
   end
 
   def source_id
     "datacite_orcid_search_link"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.dig("data", "response", "docs")
 
     Array.wrap(items).map do |item|
-      begin
-        NameIdentifierImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      NameIdentifierImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     items.length
@@ -46,17 +49,18 @@ def self.push_item(item)
     pid = normalize_doi(doi)
     related_identifiers = item.fetch("relatedIdentifier", [])
     skip_doi = related_identifiers.any? do |related_identifier|
-      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf", "IsVersionOf"].include?(related_identifier.split(':', 3).first)
+      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf",
+       "IsVersionOf"].include?(related_identifier.split(":", 3).first)
     end
     name_identifiers = item.fetch("nameIdentifier", [])
     return nil if name_identifiers.blank? || skip_doi
 
     source_id = item.fetch("sourceId", "datacite_orcid_auto_update")
     relation_type_id = "is_authored_by"
-    source_token = ENV['DATACITE_ORCID_AUTO_UPDATE_SOURCE_TOKEN']
-    
+    source_token = ENV["DATACITE_ORCID_AUTO_UPDATE_SOURCE_TOKEN"]
+
     push_items = Array.wrap(name_identifiers).reduce([]) do |ssum, iitem|
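+      # e.g. a stored value of "ORCID:0000-0002-1825-0097" (sample) splits
+      # into scheme "ORCID" and identifier "0000-0002-1825-0097"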
-      name_identifier_scheme, name_identifier = iitem.split(':', 2)
+      name_identifier_scheme, name_identifier = iitem.split(":", 2)
       name_identifier = name_identifier.strip
       obj_id = normalize_orcid(name_identifier)
 
@@ -76,17 +80,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # there can be one or more name_identifier per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -100,12 +104,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/related_arxiv.rb b/app/models/related_arxiv.rb
index ac37f854..1eddd407 100644
--- a/app/models/related_arxiv.rb
+++ b/app/models/related_arxiv.rb
@@ -1,24 +1,27 @@
 class RelatedArxiv < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedArxivImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedArxivImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                 until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for DOIs updated from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     related_arxiv = RelatedArxiv.new
-    related_arxiv.queue_jobs(related_arxiv.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    related_arxiv.queue_jobs(related_arxiv.unfreeze(
+                               from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                             ))
   end
 
   def source_id
@@ -29,18 +32,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:arXiv"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
-    
+
     Array.wrap(items).map do |item|
-      begin
-        RelatedArxivImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        logger = Logger.new(STDOUT)
-        logger.error error.message
-      end
+      RelatedArxivImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      logger = Logger.new($stdout)
+      logger.error e.message
     end
 
     items.length
@@ -49,16 +52,19 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_arxivs = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "arXiv" }
-    
+    related_arxivs = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                 nil)).select do |r|
+      r["relatedIdentifierType"] == "arXiv"
+    end
+
     push_items = Array.wrap(related_arxivs).reduce([]) do |ssum, iitem|
       related_arxiv = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
       obj_id = normalize_arxiv(related_arxiv)
       source_id = "datacite_arxiv"
-      source_token = ENV['DATACITE_ARXIV_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_ARXIV_SOURCE_TOKEN"]
 
       # only create event if valid arXiv ID
       if obj_id.present?
@@ -76,17 +82,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => {} }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_arxiv per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -100,12 +106,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                         bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                         content_type: 'application/vnd.api+json',
-                                         accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/related_handle.rb b/app/models/related_handle.rb
index f9c0a16e..8aa0e81a 100644
--- a/app/models/related_handle.rb
+++ b/app/models/related_handle.rb
@@ -1,24 +1,27 @@
 class RelatedHandle < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedHandleImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedHandleImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                  until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for DOIs updated from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     related_handle = RelatedHandle.new
-    related_handle.queue_jobs(related_handle.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    related_handle.queue_jobs(related_handle.unfreeze(
+                                from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                              ))
   end
 
   def source_id
@@ -29,18 +32,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:Handle"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
-    
+
     Array.wrap(items).map do |item|
-      begin
-        RelatedHandleImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        logger = Logger.new(STDOUT)
-        logger.error error.message
-      end
+      RelatedHandleImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      logger = Logger.new($stdout)
+      logger.error e.message
     end
 
     items.length
@@ -49,15 +52,18 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_handles = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "Handle" }
+    related_handles = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                  nil)).select do |r|
+      r["relatedIdentifierType"] == "Handle"
+    end
     push_items = Array.wrap(related_handles).reduce([]) do |ssum, iitem|
       related_handle = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
       obj_id = normalize_handle(related_handle)
       source_id = "datacite_handle"
-      source_token = ENV['DATACITE_HANDLE_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_HANDLE_SOURCE_TOKEN"]
 
       # only create event if valid Handle
       if obj_id.present?
@@ -75,17 +81,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => {} }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_handle per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -99,12 +105,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                         bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                         content_type: 'application/vnd.api+json',
-                                         accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/related_identifier.rb b/app/models/related_identifier.rb
index e59a4eae..44dbd065 100644
--- a/app/models/related_identifier.rb
+++ b/app/models/related_identifier.rb
@@ -1,29 +1,33 @@
 class RelatedIdentifier < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
   include Helpable
   include Cacheable
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
-    resource_type_id = options[:resource_type_id].present? || ""
+    resource_type_id = options[:resource_type_id].presence || ""
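+    # NOTE: .presence returns the value itself (or nil), whereas .present?
+    # yielded only true/false and discarded the requested resource type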
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedIdentifierImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"), resource_type_id: resource_type_id)
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedIdentifierImportByMonthJob.perform_later(
+        from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"), resource_type_id: resource_type_id,
+      )
     end
 
-    "Queued import for DOIs created from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
-    resource_type_id = options[:resource_type_id].present? || ""
+    resource_type_id = options[:resource_type_id].presence || ""
 
     related_identifier = RelatedIdentifier.new
-    related_identifier.queue_jobs(related_identifier.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), resource_type_id: resource_type_id))
+    related_identifier.queue_jobs(related_identifier.unfreeze(
+                                    from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"), resource_type_id: resource_type_id,
+                                  ))
   end
 
   def source_id
@@ -34,18 +38,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:DOI"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
     # Rails.logger.info "Extracting related identifiers for #{items.size} DOIs created from #{options[:from_date]} until #{options[:until_date]}."
 
     Array.wrap(items).map do |item|
-      begin
-        RelatedIdentifierImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        Rails.logger.error error.message
-      end
+      RelatedIdentifierImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      Rails.logger.error e.message
     end
 
     items.length
@@ -57,31 +61,38 @@ def self.push_item(item)
     return nil unless doi.present? && cached_doi_ra(doi) == "DataCite"
 
     pid = normalize_doi(doi)
-    related_doi_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "DOI" }
+    related_doi_identifiers = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                          nil)).select do |r|
+      r["relatedIdentifierType"] == "DOI"
+    end
     registration_agencies = {}
 
     push_items = Array.wrap(related_doi_identifiers).reduce([]) do |ssum, iitem|
-      related_identifier = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
+      related_identifier = iitem.fetch("relatedIdentifier",
+                                       nil).to_s.strip.downcase
       obj_id = normalize_doi(related_identifier)
       prefix = validate_prefix(related_identifier)
-      registration_agencies[prefix] = cached_doi_ra(related_identifier) unless registration_agencies[prefix]
+      unless registration_agencies[prefix]
+        registration_agencies[prefix] =
+          cached_doi_ra(related_identifier)
+      end
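+      # the registration agency for each DOI prefix is looked up once and
+      # memoized, sparing repeated cached_doi_ra calls for the same prefix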
 
       if registration_agencies[prefix].nil?
         Rails.logger.error "No DOI registration agency for prefix #{prefix} found."
         source_id = "datacite_related"
-        source_token = ENV['DATACITE_RELATED_SOURCE_TOKEN']
+        source_token = ENV["DATACITE_RELATED_SOURCE_TOKEN"]
         obj = {}
       elsif registration_agencies[prefix] == "DataCite"
         source_id = "datacite_related"
-        source_token = ENV['DATACITE_RELATED_SOURCE_TOKEN']
+        source_token = ENV["DATACITE_RELATED_SOURCE_TOKEN"]
         obj = cached_datacite_response(obj_id)
       elsif registration_agencies[prefix] == "Crossref"
         source_id = "datacite_crossref"
-        source_token = ENV['DATACITE_CROSSREF_SOURCE_TOKEN']
+        source_token = ENV["DATACITE_CROSSREF_SOURCE_TOKEN"]
         obj = cached_crossref_response(obj_id)
       elsif registration_agencies[prefix].present?
         source_id = "datacite_#{registration_agencies[prefix].downcase}"
-        source_token = ENV['DATACITE_OTHER_SOURCE_TOKEN']
+        source_token = ENV["DATACITE_OTHER_SOURCE_TOKEN"]
         obj = {}
       end
 
@@ -101,17 +112,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => obj }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_identifier per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "id" => iiitem["id"],
@@ -126,13 +137,16 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                          bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                          content_type: 'application/vnd.api+json',
-                                          accept: 'application/vnd.api+json; version=2')
-                                
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
+
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
         elsif response.status == 409
@@ -142,27 +156,27 @@ def self.push_item(item)
           Rails.logger.error data.inspect
         end
       end
-      
+
       # send to Event Data Bus
-      if ENV['EVENTDATA_TOKEN'].present?
+      if ENV["EVENTDATA_TOKEN"].present?
         iiitem = set_event_for_bus(iiitem)
-       
-        host = ENV['EVENTDATA_URL']
-        push_url = host + "/events"
+
+        host = ENV["EVENTDATA_URL"]
+        push_url = "#{host}/events"
         response = Maremma.post(push_url, data: iiitem.to_json,
-                                          bearer: ENV['EVENTDATA_TOKEN'],
-                                          content_type: 'json',
+                                          bearer: ENV["EVENTDATA_TOKEN"],
+                                          content_type: "json",
                                           host: host)
 
-        # return 0 if successful, 1 if error
+        # log success, duplicate (409), or error responses from the Bus
-          if response.status == 201
-            Rails.logger.info "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
-          elsif response.status == 409
-            Rails.logger.info "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} already pushed to Event Data service."
-          elsif response.body["errors"].present?
-            Rails.logger.error "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} had an error:"
-            Rails.logger.error "[Event Data Bus] #{response.body['errors'].first['title']}"
-          end
+        if response.status == 201
+          Rails.logger.info "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
+        elsif response.status == 409
+          Rails.logger.info "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} already pushed to Event Data service."
+        elsif response.body["errors"].present?
+          Rails.logger.error "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} had an error:"
+          Rails.logger.error "[Event Data Bus] #{response.body['errors'].first['title']}"
+        end
       else
         Rails.logger.info "[Event Data Bus] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} was not sent to Event Data Bus."
       end
diff --git a/app/models/related_igsn.rb b/app/models/related_igsn.rb
index 86f2ac30..2d121fcd 100644
--- a/app/models/related_igsn.rb
+++ b/app/models/related_igsn.rb
@@ -1,24 +1,27 @@
 class RelatedIgsn < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedIgsnImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedIgsnImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for DOIs updated from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     related_igsn = RelatedIgsn.new
-    related_igsn.queue_jobs(related_igsn.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    related_igsn.queue_jobs(related_igsn.unfreeze(
+                              from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                            ))
   end
 
   def source_id
@@ -29,18 +32,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:IGSN"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
-    
+
     Array.wrap(items).map do |item|
-      begin
-        RelatedIgsnImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        logger = Logger.new(STDOUT)
-        logger.error error.message
-      end
+      RelatedIgsnImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      logger = Logger.new($stdout)
+      logger.error e.message
     end
 
     items.length
@@ -49,16 +52,19 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_igsns = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "IGSN" }
-    
+    related_igsns = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                nil)).select do |r|
+      r["relatedIdentifierType"] == "IGSN"
+    end
+
     push_items = Array.wrap(related_igsns).reduce([]) do |ssum, iitem|
       related_igsn = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
       obj_id = normalize_igsn(related_igsn)
       source_id = "datacite_igsn"
-      source_token = ENV['DATACITE_IGSN_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_IGSN_SOURCE_TOKEN"]
 
       # only create event if valid IGSN
       if obj_id.present?
@@ -76,17 +82,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => {} }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_igsn per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -100,12 +106,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                         bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                         content_type: 'application/vnd.api+json',
-                                         accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/related_pmid.rb b/app/models/related_pmid.rb
index 7e65f61d..70f673b1 100644
--- a/app/models/related_pmid.rb
+++ b/app/models/related_pmid.rb
@@ -1,24 +1,27 @@
 class RelatedPmid < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedPmidImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedPmidImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                                until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for DOIs updated from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     related_pmid = RelatedPmid.new
-    related_pmid.queue_jobs(related_pmid.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    related_pmid.queue_jobs(related_pmid.unfreeze(
+                              from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                            ))
   end
 
   def source_id
@@ -29,18 +32,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:PMID"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
-    
+
     Array.wrap(items).map do |item|
-      begin
-        RelatedPmidImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        logger = Logger.new(STDOUT)
-        logger.error error.message
-      end
+      RelatedPmidImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      logger = Logger.new($stdout)
+      logger.error e.message
     end
 
     items.length
@@ -49,15 +52,18 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_pmids = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "PMID" }
+    related_pmids = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                                nil)).select do |r|
+      r["relatedIdentifierType"] == "PMID"
+    end
     push_items = Array.wrap(related_pmids).reduce([]) do |ssum, iitem|
       related_pmid = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
       obj_id = normalize_pmid(related_pmid)
       source_id = "datacite_pmid"
-      source_token = ENV['DATACITE_PMID_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_PMID_SOURCE_TOKEN"]
 
       # only create event if valid PMID
       if obj_id.present?
@@ -75,17 +81,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => {} }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_pmid per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -99,12 +105,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                         bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                         content_type: 'application/vnd.api+json',
-                                         accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/related_url.rb b/app/models/related_url.rb
index 6d4fb226..6b7ce5ef 100644
--- a/app/models/related_url.rb
+++ b/app/models/related_url.rb
@@ -1,24 +1,27 @@
 class RelatedUrl < Base
-  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/"
+  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze
 
-  def self.import_by_month(options={})
+  def self.import_by_month(options = {})
     from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
     until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month
 
     # get first day of every month between from_date and until_date
-    (from_date..until_date).select {|d| d.day == 1}.each do |m|
-      RelatedUrlImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
+    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
+      RelatedUrlImportByMonthJob.perform_later(from_date: m.strftime("%F"),
+                                               until_date: m.end_of_month.strftime("%F"))
     end
 
-    "Queued import for DOIs updated from #{from_date.strftime("%F")} until #{until_date.strftime("%F")}."
+    "Queued import for DOIs updated from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
   end
 
-  def self.import(options={})
+  def self.import(options = {})
     from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
     until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current
 
     related_url = RelatedUrl.new
-    related_url.queue_jobs(related_url.unfreeze(from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F")))
+    related_url.queue_jobs(related_url.unfreeze(
+                             from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
+                           ))
   end
 
   def source_id
@@ -29,18 +32,18 @@ def query
     "relatedIdentifiers.relatedIdentifierType:URL"
   end
 
-  def push_data(result, options={})
-    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?
+  def push_data(result, _options = {})
+    return result.body.fetch("errors") if result.body.fetch("errors",
+                                                            nil).present?
 
     items = result.body.fetch("data", [])
-    
+
     Array.wrap(items).map do |item|
-      begin
-        RelatedUrlImportJob.perform_later(item)
-      rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => error
-        logger = Logger.new(STDOUT)
-        logger.error error.message
-      end
+      RelatedUrlImportJob.perform_later(item)
+    rescue Aws::SQS::Errors::InvalidParameterValue,
+           Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
+      logger = Logger.new($stdout)
+      logger.error e.message
     end
 
     items.length
@@ -49,15 +52,18 @@ def push_data(result, options={})
   def self.push_item(item)
     attributes = item.fetch("attributes", {})
     doi = attributes.fetch("doi", nil)
-    return nil unless doi.present?
+    return nil if doi.blank?
 
     pid = normalize_doi(doi)
-    related_urls = Array.wrap(attributes.fetch("relatedIdentifiers", nil)).select { |r| r["relatedIdentifierType"] == "URL" }
+    related_urls = Array.wrap(attributes.fetch("relatedIdentifiers",
+                                               nil)).select do |r|
+      r["relatedIdentifierType"] == "URL"
+    end
     push_items = Array.wrap(related_urls).reduce([]) do |ssum, iitem|
       related_url = iitem.fetch("relatedIdentifier", nil).to_s.strip.downcase
       obj_id = normalize_url(related_url)
       source_id = "datacite_url"
-      source_token = ENV['DATACITE_URL_SOURCE_TOKEN']
+      source_token = ENV["DATACITE_URL_SOURCE_TOKEN"]
 
       # only create event if valid http/https/ftp URL
       if obj_id.present?
@@ -75,17 +81,17 @@ def self.push_item(item)
                   "subj" => subj,
                   "obj" => {} }
       end
-      
+
       ssum
     end
 
     # there can be one or more related_url per DOI
     Array.wrap(push_items).each do |iiitem|
       # send to DataCite Event Data Query API
-      if ENV['STAFF_ADMIN_TOKEN'].present?
-        push_url = ENV['LAGOTTINO_URL'] + "/events"
+      if ENV["STAFF_ADMIN_TOKEN"].present?
+        push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
-        data = { 
+        data = {
           "data" => {
             "type" => "events",
             "attributes" => {
@@ -99,12 +105,15 @@ def self.push_item(item)
               "timestamp" => iiitem["timestamp"],
               "license" => iiitem["license"],
               "subj" => iiitem["subj"],
-              "obj" => iiitem["obj"] } }}
+              "obj" => iiitem["obj"],
+            },
+          },
+        }
 
         response = Maremma.post(push_url, data: data.to_json,
-                                         bearer: ENV['STAFF_ADMIN_TOKEN'],
-                                         content_type: 'application/vnd.api+json',
-                                         accept: 'application/vnd.api+json; version=2')
+                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
+                                          content_type: "application/vnd.api+json",
+                                          accept: "application/vnd.api+json; version=2")
 
         if [200, 201].include?(response.status)
           Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
diff --git a/app/models/report.rb b/app/models/report.rb
index 8c0ebdbf..222e7db0 100644
--- a/app/models/report.rb
+++ b/app/models/report.rb
@@ -1,25 +1,29 @@
 class Report < Base
-  attr_reader :data, :header, :release, :report_id, :type, :errors, :datasets, :subsets, :report_url
+  attr_reader :data, :header, :release, :report_id, :type, :errors, :datasets,
+              :subsets, :report_url
+
   include Parserable
 
-  COMPRESSED_HASH_MESSAGE = {"code"=>69, "severity"=>"warning", "message"=>"Report is compressed using gzip", "help-url"=>"https://github.com/datacite/sashimi", "data"=>"usage data needs to be uncompressed"}
+  COMPRESSED_HASH_MESSAGE = { "code" => 69, "severity" => "warning",
+                              "message" => "Report is compressed using gzip", "help-url" => "https://github.com/datacite/sashimi", "data" => "usage data needs to be uncompressed" }.freeze
 
-  def initialize report, options={}
-    @errors = report.body.fetch("errors") if report.body.fetch("errors", nil).present?
+  def initialize(report, _options = {})
+    @errors = report.body.fetch("errors") if report.body.fetch("errors",
+                                                               nil).present?
     return @errors if report.body.fetch("errors", nil).present?
-    return [{ "errors" => { "title" => "The report is blank" }}] if report.body.blank?
+    return [{ "errors" => { "title" => "The report is blank" } }] if report.body.blank?
 
     @data = report.body.fetch("data", {})
-    @header = @data.dig("report","report-header")
-    @release = @header.dig("release")
-    @datasets = @data.dig("report","report-datasets")
-    @subsets = @data.dig("report","report-subsets")
-    @report_id = @data.dig("report","id")
+    @header = @data.dig("report", "report-header")
+    @release = @header["release"]
+    @datasets = @data.dig("report", "report-datasets")
+    @subsets = @data.dig("report", "report-subsets")
+    @report_id = @data.dig("report", "id")
     @report_url = report.url
     @type = get_type
   end
 
-  def self.parse_multi_subset_report report
+  def self.parse_multi_subset_report(report)
     subset = report.subsets.last
 
     compressed = decode_report subset["gzip"]
@@ -34,65 +38,67 @@ def self.parse_multi_subset_report report
             "https://api.stage.datacite.org/reports/#{report.report_id}"
           end
     dataset_array.map do |dataset|
-      args = {header: report.header, url: url}
+      args = { header: report.header, url: url }
       UsageUpdateParseJob.perform_later(dataset, args)
     end
     dataset_array
   end
-  
-  def self.parse_normal_report report
-    json = report.data.dig("report","report-datasets")
+
+  def self.parse_normal_report(report)
+    json = report.data.dig("report", "report-datasets")
     # hsh = parse_subset json
     json.map do |dataset|
-      args = {header: report.header, url: report.report_url}
+      args = { header: report.header, url: report.report_url }
       UsageUpdateParseJob.perform_later(dataset, args)
     end
     # UsageUpdateParseJob.perform_async(report.report_url, json)
     json
   end
 
-  def self.translate_datasets items, options
+  def self.translate_datasets(items, options)
     return [] if items.nil?
+
     # return @errors if @data.nil?
     # return @errors if @errors
 
     Array.wrap(items).reduce([]) do |x, item|
-      data = { 
-        doi: item.dig("dataset-id").first.dig("value"), 
-        id: normalize_doi(item.dig("dataset-id").first.dig("value")),
-        created: options[:header].fetch("created"), 
+      data = {
+        doi: item["dataset-id"].first["value"],
+        id: normalize_doi(item["dataset-id"].first["value"]),
+        created: options[:header].fetch("created"),
         report_url: options[:url],
-        created_at: options[:header].dig("reporting-period","begin-date")
+        created_at: options[:header].dig("reporting-period", "begin-date"),
       }
       instances = item.dig("performance", 0, "instance")
 
      return x += [OpenStruct.new(body: { "errors" => "There are too many instances in #{data[:doi]} for report #{options[:url]}. There can only be 8" })] if instances.size > 8
-   
+
       x += Array.wrap(instances).reduce([]) do |ssum, instance|
-        data[:count] = instance.dig("count")
-        event_type = "#{instance.dig("metric-type")}-#{instance.dig("access-method")}"
+        data[:count] = instance["count"]
+        event_type = "#{instance['metric-type']}-#{instance['access-method']}"
         ssum << UsageUpdate.format_event(event_type, data, options)
         ssum
       end
-    end    
+    end
   end
 
   def get_type
     return "compressed" if compressed_report?
+
     "normal"
   end
 
   def compressed_report?
     # puts @data.dig("report","report-header","exceptions")
-    return nil unless @data.dig("report","report-header","exceptions").present?
-    return nil unless @data.dig("report","report-header","exceptions").any?
+    return nil if @data.dig("report", "report-header",
+                            "exceptions").blank?
+    return nil unless @data.dig("report", "report-header", "exceptions").any?
+
     # @data.dig("report","report-header","exceptions").include?(COMPRESSED_HASH_MESSAGE)
-    exceptions = @data.dig("report","report-header","exceptions") 
-    code = exceptions.first.fetch("code","")
+    exceptions = @data.dig("report", "report-header", "exceptions")
+    code = exceptions.first.fetch("code", "")
     if code == 69
       true
-    else
-      nil
     end
   end
 
diff --git a/app/models/usage_update.rb b/app/models/usage_update.rb
index 81a1673a..9c10bd80 100644
--- a/app/models/usage_update.rb
+++ b/app/models/usage_update.rb
@@ -33,7 +33,7 @@ def self.import_by_year(options = {})
     # iterate over every year between from_date and until_date
     (from_date..until_date).each do |year|
       meta = Maremma.get(get_query_url("year" => year, size: 25))
-      total_pages = meta.body.dig("data","meta", "total-pages")
+      total_pages = meta.body.dig("data", "meta", "total-pages")
       (1..total_pages).each do |m|
         UsageUpdateImportByYearJob.perform_later(number: m)
       end
@@ -57,14 +57,13 @@ def self.get_data(report_url, _options = {})
     return OpenStruct.new(body: { "errors" => "No Report given" }) if report_url.blank?
 
     host = URI.parse(report_url).host.downcase
-    report = Maremma.get(report_url, timeout: 120, host: host)
-    report
+    Maremma.get(report_url, timeout: 120, host: host)
   end
 
   def self.import_reports(options = {})
     reports = Maremma.get(get_query_url(options))
-    reports.body.dig("data").fetch("reports",[]).each do |report|
-      ReportImportJob.perform_later(url + "/" + report.fetch("id", nil))
+    reports.body["data"].fetch("reports", []).each do |report|
+      ReportImportJob.perform_later("#{url}/#{report.fetch('id', nil)}")
     end
   end
 
@@ -78,11 +77,11 @@ def self.get_query_url(options = {})
       "page[size]" => options[:size],
       "year" => options[:year],
     }
-    url + "?" + URI.encode_www_form(params)
+    "#{url}?#{URI.encode_www_form(params)}"
   end
 
   def self.url
-    ENV["SASHIMI_QUERY_URL"] + "/reports"
+    "#{ENV['SASHIMI_QUERY_URL']}/reports"
   end
 
   def self.grab_record(sqs_msg: nil, data: nil)
@@ -147,7 +146,7 @@ def self.push_item(item, options = {})
     end
 
     data = wrap_event item, options
-    push_url = ENV["LAGOTTINO_URL"] + "/events"
+    push_url = "#{ENV['LAGOTTINO_URL']}/events"
 
     response = Maremma.post(push_url, data: data.to_json,
                                       bearer: ENV["STAFF_ADMIN_TOKEN"],
diff --git a/app/models/user.rb b/app/models/user.rb
index 4e4df218..0320e36f 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -2,7 +2,8 @@ class User
   # include jwt encode and decode
   include Authenticable
 
-  attr_accessor :name, :uid, :email, :role_id, :jwt, :orcid, :provider_id, :client_id
+  attr_accessor :name, :uid, :email, :role_id, :jwt, :orcid, :provider_id,
+                :client_id
 
   def initialize(token)
     if token.present?
diff --git a/app/serializers/doi_serializer.rb b/app/serializers/doi_serializer.rb
index 8b7b4c1c..b3c9f595 100644
--- a/app/serializers/doi_serializer.rb
+++ b/app/serializers/doi_serializer.rb
@@ -1,5 +1,6 @@
 class DoiSerializer < ActiveModel::Serializer
-  attributes :title, :description, :member_type, :region, :country, :year, :logo_url, :email, :website, :phone, :created, :updated
+  attributes :title, :description, :member_type, :region, :country, :year,
+             :logo_url, :email, :website, :phone, :created, :updated
 
   def id
     object.id.downcase
diff --git a/app/validators/uniqueness_validator.rb b/app/validators/uniqueness_validator.rb
index dc728dc5..a18c619a 100644
--- a/app/validators/uniqueness_validator.rb
+++ b/app/validators/uniqueness_validator.rb
@@ -1,6 +1,9 @@
 class UniquenessValidator < ActiveModel::Validator
   def validate(record)
-    result = record.class.find_by_id(record.symbol)
-    record.errors.add(:symbol, "This ID has already been taken") if result.present?
+    result = record.class.find_by(id: record.symbol)
+    if result.present?
+      record.errors.add(:symbol,
+                        "This ID has already been taken")
+    end
   end
 end
diff --git a/app/workers/doi_import_worker.rb b/app/workers/doi_import_worker.rb
index 2eddda4e..238eafba 100644
--- a/app/workers/doi_import_worker.rb
+++ b/app/workers/doi_import_worker.rb
@@ -1,7 +1,7 @@
 class DoiImportWorker
   include Shoryuken::Worker
 
-  shoryuken_options queue: ->{ "#{ENV['RAILS_ENV']}_doi" }, auto_delete: true
+  shoryuken_options queue: -> { "#{ENV['RAILS_ENV']}_doi" }, auto_delete: true
 
   def perform(sqs_msg, data)
     Doi.parse_record(sqs_msg: sqs_msg, data: JSON.parse(data))
diff --git a/config/application.rb b/config/application.rb
index e79db4f5..dffa9c28 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -13,9 +13,9 @@
 Bundler.require(*Rails.groups)
 
 # load ENV variables from .env file if it exists
-env_file = File.expand_path("../../.env", __FILE__)
+env_file = File.expand_path("../.env", __dir__)
 if File.exist?(env_file)
-  require 'dotenv'
+  require "dotenv"
   Dotenv.load! env_file
 end
 
@@ -28,28 +28,28 @@
 end
 
 # default values for some ENV variables
-ENV['APPLICATION'] ||= "levriero"
-ENV['MEMCACHE_SERVERS'] ||= "memcached:11211"
-ENV['SITE_TITLE'] ||= "DataCite Event Data Agents"
-ENV['LOG_LEVEL'] ||= "info"
-ENV['CONCURRENCY'] ||= "25"
-ENV['GITHUB_URL'] ||= "https://github.com/datacite/levriero"
-ENV['ORCID_API_URL'] ||= "https://pub.orcid.org/v2.1"
-ENV['API_URL'] ||= "https://api.stage.datacite.org"
-ENV['VOLPINO_URL'] ||= "https://api.stage.datacite.org"
-ENV['LAGOTTINO_URL'] ||= "https://api.stage.datacite.org"
-ENV['SASHIMI_QUERY_URL'] ||= "https://api.stage.datacite.org"
-ENV['EVENTDATA_URL'] ||= "https://bus-staging.eventdata.crossref.org"
-ENV['CROSSREF_QUERY_URL'] ||= "https://api.eventdata.crossref.org"
-ENV['TRUSTED_IP'] ||= "10.0.40.1"
-ENV['SLACK_WEBHOOK_URL'] ||= ""
-ENV['USER_AGENT'] ||= "Mozilla/5.0 (compatible; Maremma/#{Maremma::VERSION}; mailto:info@datacite.org)"
+ENV["APPLICATION"] ||= "levriero"
+ENV["MEMCACHE_SERVERS"] ||= "memcached:11211"
+ENV["SITE_TITLE"] ||= "DataCite Event Data Agents"
+ENV["LOG_LEVEL"] ||= "info"
+ENV["CONCURRENCY"] ||= "25"
+ENV["GITHUB_URL"] ||= "https://github.com/datacite/levriero"
+ENV["ORCID_API_URL"] ||= "https://pub.orcid.org/v2.1"
+ENV["API_URL"] ||= "https://api.stage.datacite.org"
+ENV["VOLPINO_URL"] ||= "https://api.stage.datacite.org"
+ENV["LAGOTTINO_URL"] ||= "https://api.stage.datacite.org"
+ENV["SASHIMI_QUERY_URL"] ||= "https://api.stage.datacite.org"
+ENV["EVENTDATA_URL"] ||= "https://bus-staging.eventdata.crossref.org"
+ENV["CROSSREF_QUERY_URL"] ||= "https://api.eventdata.crossref.org"
+ENV["TRUSTED_IP"] ||= "10.0.40.1"
+ENV["SLACK_WEBHOOK_URL"] ||= ""
+ENV["USER_AGENT"] ||= "Mozilla/5.0 (compatible; Maremma/#{Maremma::VERSION}; mailto:info@datacite.org)"
 
 module Levriero
   class Application < Rails::Application
     # Initialize configuration defaults for originally generated Rails version.
     config.load_defaults 5.1
-    config.autoload_paths << Rails.root.join('lib')
+    config.autoload_paths << Rails.root.join("lib")
     config.autoload_paths << Rails.root.join("app", "models", "concerns")
 
     # Settings in config/environments/* take precedence over those specified here.
@@ -72,7 +72,8 @@ class Application < Rails::Application
     config.logger = config.lograge.logger        ## LogStashLogger needs to be passed to the Rails logger, see roidrage/lograge#26
     config.log_level = ENV["LOG_LEVEL"].to_sym   ## Log level in a config level configuration
 
-    config.lograge.ignore_actions = ["HeartbeatController#index", "IndexController#index"]
+    config.lograge.ignore_actions = ["HeartbeatController#index",
+                                     "IndexController#index"]
     config.lograge.ignore_custom = lambda do |event|
       event.payload.inspect.length > 100000
     end
@@ -93,14 +94,15 @@ class Application < Rails::Application
     config.middleware.use Rack::Deflater
 
     # make sure all input is UTF-8
-    config.middleware.insert 0, Rack::UTF8Sanitizer, additional_content_types: ['application/vnd.api+json', 'application/xml']
+    config.middleware.insert 0, Rack::UTF8Sanitizer,
+                             additional_content_types: ["application/vnd.api+json", "application/xml"]
 
     # set Active Job queueing backend
-    if ENV["AWS_REGION"]
-      config.active_job.queue_adapter = :shoryuken
-    else
-      config.active_job.queue_adapter = :inline
-    end
+    config.active_job.queue_adapter = if ENV["AWS_REGION"]
+                                        :shoryuken
+                                      else
+                                        :inline
+                                      end
     config.active_job.queue_name_prefix = Rails.env
 
     config.generators do |g|
diff --git a/config/boot.rb b/config/boot.rb
index 4423c97f..d2ebcbca 100644
--- a/config/boot.rb
+++ b/config/boot.rb
@@ -1,4 +1,4 @@
-ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
+ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__)
 
-require 'bundler/setup' # Set up gems listed in the Gemfile.
+require "bundler/setup" # Set up gems listed in the Gemfile.
 # require 'bootsnap/setup' # Speed up boot time by caching expensive operations.
diff --git a/config/environment.rb b/config/environment.rb
index e5f472f8..e035ef4b 100644
--- a/config/environment.rb
+++ b/config/environment.rb
@@ -1,5 +1,5 @@
 # Load the Rails application.
-require_relative 'application'
+require_relative "application"
 
 # Initialize the Rails application.
 Rails.application.initialize!
diff --git a/config/environments/development.rb b/config/environments/development.rb
index 24b9b848..def16fac 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -39,4 +39,4 @@
   config.file_watcher = ActiveSupport::EventedFileUpdateChecker
 end
 
-BetterErrors::Middleware.allow_ip! ENV['TRUSTED_IP']
+BetterErrors::Middleware.allow_ip! ENV["TRUSTED_IP"]
diff --git a/config/environments/test.rb b/config/environments/test.rb
index 2fbe7a66..221bacd8 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -18,7 +18,7 @@
   # Configure public file server for tests with Cache-Control for performance.
   config.public_file_server.enabled = true
   config.public_file_server.headers = {
-    'Cache-Control' => "public, max-age=#{1.hour.seconds.to_i}"
+    "Cache-Control" => "public, max-age=#{1.hour.seconds.to_i}",
   }
 
   # Show full error reports and disable caching.
diff --git a/config/initializers/_shoryuken.rb b/config/initializers/_shoryuken.rb
index f4f5603c..c872e467 100644
--- a/config/initializers/_shoryuken.rb
+++ b/config/initializers/_shoryuken.rb
@@ -5,12 +5,10 @@ module Shoryuken
   module Middleware
     module Server
       class RavenReporter
-        def call(worker_instance, queue, sqs_msg, body)
-          tags = { job: body['job_class'], queue: queue }
+        def call(_worker_instance, queue, _sqs_msg, body, &block)
+          tags = { job: body["job_class"], queue: queue }
           context = { message: body }
-          Raven.capture(tags: tags, extra: context) do
-            yield
-          end
+          Raven.capture(tags: tags, extra: context, &block)
         end
       end
     end
diff --git a/config/initializers/_version.rb b/config/initializers/_version.rb
index 886bf473..ad071711 100644
--- a/config/initializers/_version.rb
+++ b/config/initializers/_version.rb
@@ -1,7 +1,7 @@
 module Levriero
   class Application
     g = Git.open(Rails.root)
-    VERSION = g.tags.map { |t| Gem::Version.new(t.name) }.sort.last.to_s
-    REVISION = g.object('HEAD').sha
+    VERSION = g.tags.map { |t| Gem::Version.new(t.name) }.max.to_s
+    REVISION = g.object("HEAD").sha
   end
-end
\ No newline at end of file
+end
diff --git a/config/initializers/active_model_serializers.rb b/config/initializers/active_model_serializers.rb
index f6579bff..bf1107e7 100644
--- a/config/initializers/active_model_serializers.rb
+++ b/config/initializers/active_model_serializers.rb
@@ -5,5 +5,5 @@
 ActiveModelSerializers.config.jsonapi_pagination_links_enabled = false
 
 ActiveSupport.on_load(:action_controller) do
-  require 'active_model_serializers/register_jsonapi_renderer'
+  require "active_model_serializers/register_jsonapi_renderer"
 end
diff --git a/config/initializers/api_pagination.rb b/config/initializers/api_pagination.rb
index eda17004..0f7eed01 100644
--- a/config/initializers/api_pagination.rb
+++ b/config/initializers/api_pagination.rb
@@ -1,5 +1,4 @@
 ApiPagination.configure do |config|
- 
   config.page_param do |params|
     if params[:page].is_a? ActionController::Parameters
       params[:page][:number]
@@ -15,5 +14,4 @@
       params[:per_page]
     end
   end
-
 end
diff --git a/config/initializers/constants.rb b/config/initializers/constants.rb
index fff2850b..eed7d328 100644
--- a/config/initializers/constants.rb
+++ b/config/initializers/constants.rb
@@ -2,37 +2,37 @@
                         CanCan::AuthorizationNotPerformed,
                         JWT::VerificationError,
                         JSON::ParserError,
-                        #AbstractController::ActionNotFound,
+                        # AbstractController::ActionNotFound,
                         ActionController::RoutingError,
                         ActionController::ParameterMissing,
                         ActionController::UnpermittedParameters,
-                        NoMethodError]
+                        NoMethodError].freeze
 
 # Format used for DOI validation
 # The prefix is 10.x where x is 4-5 digits. The suffix can be anything, but can't be left off
-DOI_FORMAT = %r(\A10\.\d{4,5}/.+)
+DOI_FORMAT = %r(\A10\.\d{4,5}/.+).freeze
 
 # Format used for URL validation
-URL_FORMAT = %r(\A(http|https|ftp):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?\z)
+URL_FORMAT = %r(\A(http|https|ftp)://[a-z0-9]+([\-.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?/.*)?\z).freeze
 
 # Form queue options
-QUEUE_OPTIONS = ["high", "default", "low"]
+QUEUE_OPTIONS = ["high", "default", "low"].freeze
 
 # Version of ORCID API
-ORCID_VERSION = '1.2'
+ORCID_VERSION = "1.2".freeze
 
 # ORCID schema
-ORCID_SCHEMA = 'https://raw.githubusercontent.com/ORCID/ORCID-Source/master/orcid-model/src/main/resources/orcid-message-1.2.xsd'
+ORCID_SCHEMA = "https://raw.githubusercontent.com/ORCID/ORCID-Source/master/orcid-model/src/main/resources/orcid-message-1.2.xsd".freeze
 
 # Version of DataCite API
-DATACITE_VERSION = "4"
+DATACITE_VERSION = "4".freeze
 
 # Date of DataCite Schema
-DATACITE_SCHEMA_DATE = "2016-09-21"
+DATACITE_SCHEMA_DATE = "2016-09-21".freeze
 
 # regions used by countries gem
 REGIONS = {
   "APAC" => "Asia and Pacific",
   "EMEA" => "Europe, Middle East and Africa",
-  "AMER" => "Americas"
-}
+  "AMER" => "Americas",
+}.freeze
diff --git a/config/initializers/cors.rb b/config/initializers/cors.rb
index b584542f..5e4c830d 100644
--- a/config/initializers/cors.rb
+++ b/config/initializers/cors.rb
@@ -7,10 +7,10 @@
 
 Rails.application.config.middleware.insert_before 0, Rack::Cors do
   allow do
-    origins '*'
+    origins "*"
 
-    resource '*',
-      headers: :any,
-      methods: [:get, :post, :put, :patch, :delete, :options, :head]
+    resource "*",
+             headers: :any,
+             methods: %i[get post put patch delete options head]
   end
-end
\ No newline at end of file
+end
diff --git a/config/initializers/inflections.rb b/config/initializers/inflections.rb
index 50f55ac6..df649fa8 100644
--- a/config/initializers/inflections.rb
+++ b/config/initializers/inflections.rb
@@ -2,5 +2,5 @@
 # are locale specific, and you may define rules for as many different
 # locales as you wish. All of these examples are active by default:
 ActiveSupport::Inflector.inflections(:en) do |inflect|
-  inflect.uncountable %w( status heartbeat metadata media )
+  inflect.uncountable %w(status heartbeat metadata media)
 end
diff --git a/config/initializers/json_param_key_transform.rb b/config/initializers/json_param_key_transform.rb
index a5a4f892..312ae567 100644
--- a/config/initializers/json_param_key_transform.rb
+++ b/config/initializers/json_param_key_transform.rb
@@ -1,9 +1,9 @@
 # Transform JSON request param keys from JSON-conventional camelCase to
 # Rails-conventional snake_case:
-ActionDispatch::Request.parameter_parsers[:json] = -> (raw_post) {
+ActionDispatch::Request.parameter_parsers[:json] = ->(raw_post) {
   # Modified from action_dispatch/http/parameters.rb
   data = ActiveSupport::JSON.decode(raw_post)
-  data = {:_json => data} unless data.is_a?(Hash)
+  data = { _json: data } unless data.is_a?(Hash)
 
   # Transform camelCase param keys to snake_case:
   data.deep_transform_keys!(&:underscore)
diff --git a/config/initializers/kaminari_config.rb b/config/initializers/kaminari_config.rb
index 9469bc7f..7e901579 100644
--- a/config/initializers/kaminari_config.rb
+++ b/config/initializers/kaminari_config.rb
@@ -3,4 +3,4 @@
   config.max_per_page = 1000
 end
 
-Kaminari::Hooks.init if defined?(Kaminari::Hooks)
\ No newline at end of file
+Kaminari::Hooks.init if defined?(Kaminari::Hooks)
diff --git a/config/initializers/sentry.rb b/config/initializers/sentry.rb
index 02aef0d0..27d19316 100644
--- a/config/initializers/sentry.rb
+++ b/config/initializers/sentry.rb
@@ -1,5 +1,5 @@
 Raven.configure do |config|
   config.dsn = ENV["SENTRY_DSN"]
-  config.release = "levriero:" + Levriero::Application::VERSION
+  config.release = "levriero:#{Levriero::Application::VERSION}"
   config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s)
-end
\ No newline at end of file
+end
diff --git a/config/routes.rb b/config/routes.rb
index 76de3b34..a34c99ac 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -1,14 +1,14 @@
 Rails.application.routes.draw do
-  root to: 'index#index'
+  root to: "index#index"
 
   resources :heartbeat, only: [:index]
-  resources :index, path: '/', only: [:index]
+  resources :index, path: "/", only: [:index]
 
   # trigger agents
-  post 'agents/crossref', to: 'agents#crossref'
-  post 'agents/crossref-orcid', to: 'agents#crossref_orcid'
-  post 'agents/crossref-funder', to: 'agents#crossref_funder'
-  post 'agents/crossref-related', to: 'agents#crossref_related'
+  post "agents/crossref", to: "agents#crossref"
+  post "agents/crossref-orcid", to: "agents#crossref_orcid"
+  post "agents/crossref-funder", to: "agents#crossref_funder"
+  post "agents/crossref-related", to: "agents#crossref_related"
 
   # rescue routing errors
   # match "*path", to: "index#routing_error", via: :all
diff --git a/lib/tasks/affiliation_identifier.rake b/lib/tasks/affiliation_identifier.rake
index a48ce9c1..9d6cc478 100644
--- a/lib/tasks/affiliation_identifier.rake
+++ b/lib/tasks/affiliation_identifier.rake
@@ -1,19 +1,21 @@
 namespace :affiliation_identifier do
-  desc 'Import all affiliation_identifiers by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all affiliation_identifiers by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = AffiliationIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+    response = AffiliationIdentifier.import_by_month(from_date: from_date,
+                                                     until_date: until_date)
     puts response
   end
 
-  desc 'Import all affiliation_identifiers'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all affiliation_identifiers"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = AffiliationIdentifier.import(from_date: from_date, until_date: until_date)
+    response = AffiliationIdentifier.import(from_date: from_date,
+                                            until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/crossref.rake b/lib/tasks/crossref.rake
index add62690..f4b5910a 100644
--- a/lib/tasks/crossref.rake
+++ b/lib/tasks/crossref.rake
@@ -1,19 +1,20 @@
 namespace :crossref do
-  desc 'Import all crossref events by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all crossref events by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = Crossref.import_by_month(from_date: from_date, until_date: until_date)
+    response = Crossref.import_by_month(from_date: from_date,
+                                        until_date: until_date)
     puts response
   end
 
-  desc 'Import all crossref events'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all crossref events"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = Crossref.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
   end
-end
\ No newline at end of file
+end
diff --git a/lib/tasks/crossref_funder.rake b/lib/tasks/crossref_funder.rake
index 041fe080..67feeeca 100644
--- a/lib/tasks/crossref_funder.rake
+++ b/lib/tasks/crossref_funder.rake
@@ -1,19 +1,21 @@
 namespace :crossref_funder do
-  desc 'Import all crossref_funder links by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all crossref_funder links by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = CrossrefFunder.import_by_month(from_date: from_date, until_date: until_date)
+    response = CrossrefFunder.import_by_month(from_date: from_date,
+                                              until_date: until_date)
     puts response
   end
 
-  desc 'Import all crossref_funder links'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all crossref_funder links"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = CrossrefFunder.import(from_date: from_date, until_date: until_date)
+    response = CrossrefFunder.import(from_date: from_date,
+                                     until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/crossref_import.rake b/lib/tasks/crossref_import.rake
index eb45749c..060de490 100644
--- a/lib/tasks/crossref_import.rake
+++ b/lib/tasks/crossref_import.rake
@@ -1,19 +1,21 @@
 namespace :crossref_import do
-  desc 'Import all crossref_import links by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all crossref_import links by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = CrossrefImport.import_by_month(from_date: from_date, until_date: until_date)
+    response = CrossrefImport.import_by_month(from_date: from_date,
+                                              until_date: until_date)
     puts response
   end
 
-  desc 'Import all crossref_import links'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all crossref_import links"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = CrossrefImport.import(from_date: from_date, until_date: until_date)
+    response = CrossrefImport.import(from_date: from_date,
+                                     until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/crossref_orcid.rake b/lib/tasks/crossref_orcid.rake
index ab53b25d..47bbad16 100644
--- a/lib/tasks/crossref_orcid.rake
+++ b/lib/tasks/crossref_orcid.rake
@@ -1,19 +1,21 @@
 namespace :crossref_orcid do
-  desc 'Import all crossref_orcid links by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all crossref_orcid links by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = CrossrefOrcid.import_by_month(from_date: from_date, until_date: until_date)
+    response = CrossrefOrcid.import_by_month(from_date: from_date,
+                                             until_date: until_date)
     puts response
   end
 
-  desc 'Import all crossref_orcid links'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all crossref_orcid links"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = CrossrefOrcid.import(from_date: from_date, until_date: until_date)
+    response = CrossrefOrcid.import(from_date: from_date,
+                                    until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/crossref_related.rake b/lib/tasks/crossref_related.rake
index c7065ed0..715e106d 100644
--- a/lib/tasks/crossref_related.rake
+++ b/lib/tasks/crossref_related.rake
@@ -1,19 +1,21 @@
 namespace :crossref_related do
-  desc 'Import all references by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all references by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = CrossrefRelated.import_by_month(from_date: from_date, until_date: until_date)
+    response = CrossrefRelated.import_by_month(from_date: from_date,
+                                               until_date: until_date)
     puts response
   end
 
-  desc 'Import all references'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all references"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = CrossrefRelated.import(from_date: from_date, until_date: until_date)
+    response = CrossrefRelated.import(from_date: from_date,
+                                      until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/funder_identifier.rake b/lib/tasks/funder_identifier.rake
index 22a9be57..eb483c94 100644
--- a/lib/tasks/funder_identifier.rake
+++ b/lib/tasks/funder_identifier.rake
@@ -1,19 +1,21 @@
 namespace :funder_identifier do
-  desc 'Import all funder_identifiers by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all funder_identifiers by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = FunderIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+    response = FunderIdentifier.import_by_month(from_date: from_date,
+                                                until_date: until_date)
     puts response
   end
 
-  desc 'Import all funder_identifiers'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all funder_identifiers"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = FunderIdentifier.import(from_date: from_date, until_date: until_date)
+    response = FunderIdentifier.import(from_date: from_date,
+                                       until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
-end
\ No newline at end of file
+end
diff --git a/lib/tasks/memcached.rake b/lib/tasks/memcached.rake
index f3673b71..f33476c6 100644
--- a/lib/tasks/memcached.rake
+++ b/lib/tasks/memcached.rake
@@ -1,6 +1,6 @@
 namespace :memcached do
-  desc 'Clears the Rails cache'
-  task :flush => :environment do
+  desc "Clears the Rails cache"
+  task flush: :environment do
     Rails.cache.clear
   end
 end
diff --git a/lib/tasks/name_identifier.rake b/lib/tasks/name_identifier.rake
index 0d1b07cf..09fccb2c 100644
--- a/lib/tasks/name_identifier.rake
+++ b/lib/tasks/name_identifier.rake
@@ -1,29 +1,31 @@
 namespace :name_identifier do
-  desc 'Import all name_identifiers by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all name_identifiers by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = NameIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+    response = NameIdentifier.import_by_month(from_date: from_date,
+                                              until_date: until_date)
     puts response
   end
 
-  desc 'Import all name_identifiers'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all name_identifiers"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = NameIdentifier.import(from_date: from_date, until_date: until_date)
+    response = NameIdentifier.import(from_date: from_date,
+                                     until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 
-  task :import_one => :environment do
+  task import_one: :environment do
     if ENV["DOI"].nil?
       puts "ENV['DOI'] is required."
       exit
     end
 
-    response = NameIdentifier.import_one(doi: ENV['DOI'])
+    response = NameIdentifier.import_one(doi: ENV["DOI"])
     puts "Import for #{response} DOI #{ENV['DOI']}"
   end
-end
\ No newline at end of file
+end
diff --git a/lib/tasks/orcid_affiliation.rake b/lib/tasks/orcid_affiliation.rake
index 1a03febd..084ae3f7 100644
--- a/lib/tasks/orcid_affiliation.rake
+++ b/lib/tasks/orcid_affiliation.rake
@@ -1,19 +1,21 @@
 namespace :orcid_affiliation do
-  desc 'Import all orcid_affiliations by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all orcid_affiliations by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = OrcidAffiliation.import_by_month(from_date: from_date, until_date: until_date)
+    response = OrcidAffiliation.import_by_month(from_date: from_date,
+                                                until_date: until_date)
     puts response
   end
 
-  desc 'Import all orcid_affiliations'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all orcid_affiliations"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = OrcidAffiliation.import(from_date: from_date, until_date: until_date)
+    response = OrcidAffiliation.import(from_date: from_date,
+                                       until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/orcid_claim.rake b/lib/tasks/orcid_claim.rake
index d4cba157..c2b60f4a 100644
--- a/lib/tasks/orcid_claim.rake
+++ b/lib/tasks/orcid_claim.rake
@@ -1,19 +1,20 @@
 namespace :orcid_claim do
-  desc 'Import all orcid claims by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all orcid claims by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = OrcidClaim.import_by_month(from_date: from_date, until_date: until_date)
+    response = OrcidClaim.import_by_month(from_date: from_date,
+                                          until_date: until_date)
     puts response
   end
 
-  desc 'Import all orcid claims'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all orcid claims"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = OrcidClaim.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} claims created from #{from_date} - #{until_date}."
   end
-end
\ No newline at end of file
+end
diff --git a/lib/tasks/related_arxiv.rake b/lib/tasks/related_arxiv.rake
index 45fd0a09..881eac7a 100644
--- a/lib/tasks/related_arxiv.rake
+++ b/lib/tasks/related_arxiv.rake
@@ -1,17 +1,18 @@
 namespace :related_arxiv do
-  desc 'Import all related_arxivs by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all related_arxivs by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = RelatedArxiv.import_by_month(from_date: from_date, until_date: until_date)
+    response = RelatedArxiv.import_by_month(from_date: from_date,
+                                            until_date: until_date)
     puts response
   end
 
-  desc 'Import all related_arxivs'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_arxivs"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = RelatedArxiv.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
diff --git a/lib/tasks/related_handle.rake b/lib/tasks/related_handle.rake
index cbe69b07..cfdf14be 100644
--- a/lib/tasks/related_handle.rake
+++ b/lib/tasks/related_handle.rake
@@ -1,19 +1,21 @@
 namespace :related_handle do
-  desc 'Import all related_handles by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all related_handles by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = RelatedHandle.import_by_month(from_date: from_date, until_date: until_date)
+    response = RelatedHandle.import_by_month(from_date: from_date,
+                                             until_date: until_date)
     puts response
   end
 
-  desc 'Import all related_handles'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_handles"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = RelatedHandle.import(from_date: from_date, until_date: until_date)
+    response = RelatedHandle.import(from_date: from_date,
+                                    until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/related_identifier.rake b/lib/tasks/related_identifier.rake
index 3709ab1d..248eebeb 100644
--- a/lib/tasks/related_identifier.rake
+++ b/lib/tasks/related_identifier.rake
@@ -1,20 +1,22 @@
 namespace :related_identifier do
-  desc 'Import all related_identifiers by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
-    resource_type_id = ENV['RESOURCE_TYPES'] || ""
+  desc "Import all related_identifiers by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
+    resource_type_id = ENV["RESOURCE_TYPES"] || ""
 
-    response = RelatedIdentifier.import_by_month(from_date: from_date, until_date: until_date, resource_type_id: resource_type_id)
+    response = RelatedIdentifier.import_by_month(from_date: from_date,
+                                                 until_date: until_date, resource_type_id: resource_type_id)
     puts response
   end
 
-  desc 'Import all related_identifiers'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_identifiers"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
-    response = RelatedIdentifier.import(from_date: from_date, until_date: until_date)
+    response = RelatedIdentifier.import(from_date: from_date,
+                                        until_date: until_date)
     puts "Queued import for #{response} DOIs created from #{from_date} - #{until_date}."
   end
 end
diff --git a/lib/tasks/related_igsn.rake b/lib/tasks/related_igsn.rake
index a5edbaca..aca67f83 100644
--- a/lib/tasks/related_igsn.rake
+++ b/lib/tasks/related_igsn.rake
@@ -1,17 +1,18 @@
 namespace :related_igsn do
-  desc 'Import all related_igsns by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all related_igsns by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = RelatedIgsn.import_by_month(from_date: from_date, until_date: until_date)
+    response = RelatedIgsn.import_by_month(from_date: from_date,
+                                           until_date: until_date)
     puts response
   end
 
-  desc 'Import all related_igsns'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_igsns"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = RelatedIgsn.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
diff --git a/lib/tasks/related_pmid.rake b/lib/tasks/related_pmid.rake
index d1f2947e..2cddf032 100644
--- a/lib/tasks/related_pmid.rake
+++ b/lib/tasks/related_pmid.rake
@@ -1,17 +1,18 @@
 namespace :related_pmid do
-  desc 'Import all related_pmids by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all related_pmids by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = RelatedPmid.import_by_month(from_date: from_date, until_date: until_date)
+    response = RelatedPmid.import_by_month(from_date: from_date,
+                                           until_date: until_date)
     puts response
   end
 
-  desc 'Import all related_pmids'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_pmids"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = RelatedPmid.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
diff --git a/lib/tasks/related_url.rake b/lib/tasks/related_url.rake
index 7951dd90..82faa3ef 100644
--- a/lib/tasks/related_url.rake
+++ b/lib/tasks/related_url.rake
@@ -1,17 +1,18 @@
 namespace :related_url do
-  desc 'Import all related_urls by month'
-  task :import_by_month => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_month.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_month.strftime("%F")
+  desc "Import all related_urls by month"
+  task import_by_month: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_month.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_month.strftime("%F")
 
-    response = RelatedUrl.import_by_month(from_date: from_date, until_date: until_date)
+    response = RelatedUrl.import_by_month(from_date: from_date,
+                                          until_date: until_date)
     puts response
   end
 
-  desc 'Import all related_urls'
-  task :import => :environment do
-    from_date = ENV['FROM_DATE'] || (Date.current - 1.day).strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.strftime("%F")
+  desc "Import all related_urls"
+  task import: :environment do
+    from_date = ENV["FROM_DATE"] || (Date.current - 1.day).strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.strftime("%F")
 
     response = RelatedUrl.import(from_date: from_date, until_date: until_date)
     puts "Queued import for #{response} DOIs updated from #{from_date} - #{until_date}."
diff --git a/lib/tasks/usage_update.rake b/lib/tasks/usage_update.rake
index 3f9badcb..ede2e275 100644
--- a/lib/tasks/usage_update.rake
+++ b/lib/tasks/usage_update.rake
@@ -1,11 +1,11 @@
 namespace :usage_update do
-  desc 'Import all usage_updates by year'
-  task :import_by_year => :environment do
-    from_date = ENV['FROM_DATE'] || Date.current.beginning_of_year.strftime("%F")
-    until_date = ENV['UNTIL_DATE'] || Date.current.end_of_year.strftime("%F")
+  desc "Import all usage_updates by year"
+  task import_by_year: :environment do
+    from_date = ENV["FROM_DATE"] || Date.current.beginning_of_year.strftime("%F")
+    until_date = ENV["UNTIL_DATE"] || Date.current.end_of_year.strftime("%F")
 
-    response = UsageUpdate.import_by_year(from_date: from_date, until_date: until_date)
+    response = UsageUpdate.import_by_year(from_date: from_date,
+                                          until_date: until_date)
     puts response
   end
-
-end
\ No newline at end of file
+end
diff --git a/spec/concerns/authenticable_spec.rb b/spec/concerns/authenticable_spec.rb
index ec10dbc5..4b2b9f26 100644
--- a/spec/concerns/authenticable_spec.rb
+++ b/spec/concerns/authenticable_spec.rb
@@ -1,10 +1,10 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe User, type: :model do
   let(:token) { User.generate_token }
   subject { User.new(token) }
 
-  describe 'decode_token' do
+  describe "decode_token" do
     it "has name" do
       payload = subject.decode_token(token)
       expect(payload["name"]).to eq("Josiah Carberry")
@@ -21,7 +21,7 @@
     end
   end
 
-  describe 'encode_token' do
+  describe "encode_token" do
     it "with name" do
       token = subject.encode_token("name" => "Josiah Carberry")
       expect(token).to start_with("eyJhbG")
diff --git a/spec/concerns/delegatable_spec.rb b/spec/concerns/delegatable_spec.rb
index 83be7705..e8cc8944 100644
--- a/spec/concerns/delegatable_spec.rb
+++ b/spec/concerns/delegatable_spec.rb
@@ -7,20 +7,17 @@
 #   let(:params)  { {year: 2008} }
 #   let(:params2)  { {year: clients.first.created.year} }
 
-
 #   describe "dois_count_by_client" do
 
-#     before do 
-#         Provider.create(provider)         
-#         Client.create(client) 
+#     before do
+#         Provider.create(provider)
+#         Client.create(client)
 #         sleep 2
 #       end
 
 #     it "should return OK response" do
 #         r = model.dois_count_by_client "clients/tib.tib"
 
-
-
 #     end
 #     # it "should return formatted counts" do
 #     #   client = clients.first
@@ -48,4 +45,4 @@
 # #             expect(facet.first[:count]).to eq(5)
 # #         end
 # #     end
-# # end
\ No newline at end of file
+# # end
diff --git a/spec/concerns/facetable_spec.rb b/spec/concerns/facetable_spec.rb
index a093f32a..cbda9bd1 100644
--- a/spec/concerns/facetable_spec.rb
+++ b/spec/concerns/facetable_spec.rb
@@ -1,7 +1,6 @@
+require "rails_helper"
 
-require 'rails_helper'
-
-describe 'Clients', elasticsearch: true, type: :controller do
+describe "Clients", elasticsearch: true, type: :controller do
   # let!(:provider) { build(:provider) }
   # let(:model) { ClientsController.new }
   # let!(:clients)  { build_list(:client, 5, provider_id: provider) }
@@ -10,11 +9,10 @@
   # let!(:params3)  { {year: nil} }
   # let!(:ids)  { clients.first.symbol+","+clients.last.symbol }
 
-
   # describe "facet by year" do
-  #   # context "this" do 
-  #     before do 
-  #       Provider.create(provider)         
+  #   # context "this" do
+  #     before do
+  #       Provider.create(provider)
   #       clients.each { |item| Client.create(item) }
   #       # dois.each   { |item| Doi.create(item) }
   #       sleep 2
@@ -42,9 +40,9 @@
   # end
 
   # describe "filter_by_ids" do
-  #   context "this" do 
-  #     before do 
-  #       Provider.create(provider)         
+  #   context "this" do
+  #     before do
+  #       Provider.create(provider)
   #       clients.each { |item| Client.create(item) }
   #       # dois.each   { |item| Doi.create(item) }
   #       sleep 1
diff --git a/spec/concerns/helpable_spec.rb b/spec/concerns/helpable_spec.rb
index 18453952..00e0d389 100644
--- a/spec/concerns/helpable_spec.rb
+++ b/spec/concerns/helpable_spec.rb
@@ -1,18 +1,18 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe RelatedIdentifier, type: :model do
-
   it "format event for eventdata bus" do
-    event = File.read(fixture_path + 'datacite_event.json')
+    event = File.read("#{fixture_path}datacite_event.json")
     response = RelatedIdentifier.set_event_for_bus(JSON.parse(event))
     expect(response["id"]).to eq("c8bcd46c-3433-47ac-b8db-d039ce346d65")
     expect(response["source_id"]).to eq("datacite")
     expect(response["subj_id"]).to eq("https://doi.org/10.15468/dl.hy9tqg")
     expect(response["relation_type_id"]).to eq("references")
     expect(response["source_token"]).to eq("29a9a478-518f-4cbd-a133-a0dcef63d547")
-    expect(response["obj"]).to eq({"pid"=>"https://doi.org/10.15468/xgoxap", "work_type_id"=>"Dataset"})
-    expect(response["subj"]).to eq({"pid"=>"https://doi.org/10.15468/dl.hy9tqg", "work_type_id"=>"Dataset"})
+    expect(response["obj"]).to eq({ "pid" => "https://doi.org/10.15468/xgoxap",
+                                    "work_type_id" => "Dataset" })
+    expect(response["subj"]).to eq({
+                                     "pid" => "https://doi.org/10.15468/dl.hy9tqg", "work_type_id" => "Dataset"
+                                   })
   end
 end
-
-
diff --git a/spec/concerns/importable_spec.rb b/spec/concerns/importable_spec.rb
index 948b37a7..7863cc2d 100644
--- a/spec/concerns/importable_spec.rb
+++ b/spec/concerns/importable_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "Importable", vcr: true do
   # context "Doi" do
@@ -79,7 +79,8 @@
   describe "to_kebab_case" do
     it "converts" do
       hsh = { "provider-id" => "bl", "country-code" => "GB" }
-      expect(Doi.to_kebab_case(hsh)).to eq("provider_id"=>"bl", "country_code"=>"GB")
+      expect(Doi.to_kebab_case(hsh)).to eq("provider_id" => "bl",
+                                           "country_code" => "GB")
     end
   end
 
diff --git a/spec/factories/default.rb b/spec/factories/default.rb
index b85450c0..fc8fc4dd 100644
--- a/spec/factories/default.rb
+++ b/spec/factories/default.rb
@@ -1,11 +1,13 @@
-require 'faker'
+require "faker"
 
 FactoryBot.define do
   factory :doi, class: OpenStruct do
-    doi { ("10.14454/" + Faker::Internet.password(8)).downcase }
-    url {Faker::Internet.url }
+    doi { "10.14454/#{Faker::Internet.password(8)}".downcase }
+    url { Faker::Internet.url }
     is_active { true }
-    xml  { "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9InllcyI/PjxyZXNvdXJjZSB4c2k6c2NoZW1hTG9jYXRpb249Imh0dHA6Ly9kYXRhY2l0ZS5vcmcvc2NoZW1hL2tlcm5lbC0zIGh0dHA6Ly9zY2hlbWEuZGF0YWNpdGUub3JnL21ldGEva2VybmVsLTMvbWV0YWRhdGEueHNkIiB4bWxucz0iaHR0cDovL2RhdGFjaXRlLm9yZy9zY2hlbWEva2VybmVsLTMiIHhtbG5zOnhzaT0iaHR0cDovL3d3dy53My5vcmcvMjAwMS9YTUxTY2hlbWEtaW5zdGFuY2UiPjxpZGVudGlmaWVyIGlkZW50aWZpZXJUeXBlPSJET0kiPjEwLjUyNTYvZjEwMDByZXNlYXJjaC44NTcwLnI2NDIwPC9pZGVudGlmaWVyPjxjcmVhdG9ycz48Y3JlYXRvcj48Y3JlYXRvck5hbWU+ZCBzPC9jcmVhdG9yTmFtZT48L2NyZWF0b3I+PC9jcmVhdG9ycz48dGl0bGVzPjx0aXRsZT5SZWZlcmVlIHJlcG9ydC4gRm9yOiBSRVNFQVJDSC0zNDgyIFt2ZXJzaW9uIDU7IHJlZmVyZWVzOiAxIGFwcHJvdmVkLCAxIGFwcHJvdmVkIHdpdGggcmVzZXJ2YXRpb25zXTwvdGl0bGU+PC90aXRsZXM+PHB1Ymxpc2hlcj5GMTAwMCBSZXNlYXJjaCBMaW1pdGVkPC9wdWJsaXNoZXI+PHB1YmxpY2F0aW9uWWVhcj4yMDE3PC9wdWJsaWNhdGlvblllYXI+PHJlc291cmNlVHlwZSByZXNvdXJjZVR5cGVHZW5lcmFsPSJUZXh0Ii8+PC9yZXNvdXJjZT4=" }
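+    # Base64-encoded DataCite kernel-3 metadata XML used as the fixture payload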
+    xml do
+      "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9InllcyI/PjxyZXNvdXJjZSB4c2k6c2NoZW1hTG9jYXRpb249Imh0dHA6Ly9kYXRhY2l0ZS5vcmcvc2NoZW1hL2tlcm5lbC0zIGh0dHA6Ly9zY2hlbWEuZGF0YWNpdGUub3JnL21ldGEva2VybmVsLTMvbWV0YWRhdGEueHNkIiB4bWxucz0iaHR0cDovL2RhdGFjaXRlLm9yZy9zY2hlbWEva2VybmVsLTMiIHhtbG5zOnhzaT0iaHR0cDovL3d3dy53My5vcmcvMjAwMS9YTUxTY2hlbWEtaW5zdGFuY2UiPjxpZGVudGlmaWVyIGlkZW50aWZpZXJUeXBlPSJET0kiPjEwLjUyNTYvZjEwMDByZXNlYXJjaC44NTcwLnI2NDIwPC9pZGVudGlmaWVyPjxjcmVhdG9ycz48Y3JlYXRvcj48Y3JlYXRvck5hbWU+ZCBzPC9jcmVhdG9yTmFtZT48L2NyZWF0b3I+PC9jcmVhdG9ycz48dGl0bGVzPjx0aXRsZT5SZWZlcmVlIHJlcG9ydC4gRm9yOiBSRVNFQVJDSC0zNDgyIFt2ZXJzaW9uIDU7IHJlZmVyZWVzOiAxIGFwcHJvdmVkLCAxIGFwcHJvdmVkIHdpdGggcmVzZXJ2YXRpb25zXTwvdGl0bGU+PC90aXRsZXM+PHB1Ymxpc2hlcj5GMTAwMCBSZXNlYXJjaCBMaW1pdGVkPC9wdWJsaXNoZXI+PHB1YmxpY2F0aW9uWWVhcj4yMDE3PC9wdWJsaWNhdGlvblllYXI+PHJlc291cmNlVHlwZSByZXNvdXJjZVR5cGVHZW5lcmFsPSJUZXh0Ii8+PC9yZXNvdXJjZT4="
+    end
     aasm_state { "draft" }
     created_at { Faker::Time.between(DateTime.now - 2, DateTime.now) }
     updated_at { created_at }
@@ -16,18 +18,26 @@
 
   factory :event, class: OpenStruct do
     sequence(:uuid) { |n| "#{SecureRandom.uuid}-#{n}" }
-    message_action { "create" } 
+    message_action { "create" }
     sequence(:obj_id) { |n| "#{Faker::Internet.url}#{n}" }
     sequence(:subj_id) { |n| "#{Faker::Internet.url}#{n}" }
     total { Faker::Number.number(digits: 3) }
-    subj {{
-      "id" => "#{SecureRandom.uuid}",
-      "issued" => Faker::Time.between(from: DateTime.now - 2, to: DateTime.now),
-    }}
-    relation_type_id { ["total-dataset-investigations-regular","total-dataset-investigations-machine","unique-dataset-investigations-machine","total-dataset-investigations-machine"].sample }
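+    # subj mirrors the originating report metadata (id and issued date)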
+    subj do
+      {
+        "id" => SecureRandom.uuid, # uuid already returns a String
+        "issued" => Faker::Time.between(from: DateTime.now - 2,
+                                        to: DateTime.now),
+      }
+    end
+    relation_type_id do
+      # NOTE: assuming the four distinct COUNTER investigation types were
+      # intended; the original sampled "total-dataset-investigations-machine" twice
+      ["total-dataset-investigations-regular",
+       "total-dataset-investigations-machine",
+       "unique-dataset-investigations-regular",
+       "unique-dataset-investigations-machine"].sample
+    end
     source_id { "datacite-usage" }
     sequence(:source_token) { |n| "#{SecureRandom.uuid}-#{n}" }
-    occurred_at { Faker::Time.between(from: DateTime.now - 2, to: DateTime.now) }
+    occurred_at do
+      Faker::Time.between(from: DateTime.now - 2, to: DateTime.now)
+    end
     license { "https://creativecommons.org/publicdomain/zero/1.0/" }
   end
 end
diff --git a/spec/jobs/import_job_spec.rb b/spec/jobs/import_job_spec.rb
index 1e8e4ad8..e87c099f 100644
--- a/spec/jobs/import_job_spec.rb
+++ b/spec/jobs/import_job_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 # describe ImportJob, type: :job do
 #   context "Client" do
diff --git a/spec/jobs/usage_update_export_job_spec.rb b/spec/jobs/usage_update_export_job_spec.rb
index 5728ff33..e5ab1e42 100644
--- a/spec/jobs/usage_update_export_job_spec.rb
+++ b/spec/jobs/usage_update_export_job_spec.rb
@@ -1,25 +1,20 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe UsageUpdateExportJob, type: :job, vcr: true do
   include ActiveJob::TestHelper
 
-
   context "Client" do
-    let(:item) { create(:event).to_json  }
+    let(:item) { create(:event).to_json }
     subject(:job) { UsageUpdateExportJob.perform_later(item) }
 
-    it 'queues the job' do
-      expect { job }.to have_enqueued_job(UsageUpdateExportJob)
-        .on_queue("test_levriero_usage")
+    it "queues the job" do
+      expect { job }.to have_enqueued_job(UsageUpdateExportJob).
+        on_queue("test_levriero_usage")
     end
 
     # it 'performs' do
     #   expect(UsageUpdate).to receive(:item).with(item)
     #   UsageUpdateExportJob.perform_now(item)
     # end
-
   end
 end
-
-
-
diff --git a/spec/jobs/usage_update_parse_job_spec.rb b/spec/jobs/usage_update_parse_job_spec.rb
index 1ca61491..3e9a4747 100644
--- a/spec/jobs/usage_update_parse_job_spec.rb
+++ b/spec/jobs/usage_update_parse_job_spec.rb
@@ -1,43 +1,53 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe UsageUpdateParseJob, type: :job, vcr: true do
   include ActiveJob::TestHelper
 
   context "Client" do
-    let(:item) { "https://api.stage.datacite.org/reports/d4cccd37-9044-4c59-85d4-f2063ce361cd" }
-    let(:body) { File.read(fixture_path + 'usage_update_3.json') }
-    let(:result) { OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/d4cccd37-9044-4c59-85d4-f2063ce361cd") }
+    let(:item) do
+      "https://api.stage.datacite.org/reports/d4cccd37-9044-4c59-85d4-f2063ce361cd"
+    end
+    let(:body) { File.read("#{fixture_path}usage_update_3.json") }
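+    # Minimal stand-in for an HTTP response: Report.new only needs body and url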
+    let(:result) do
+      OpenStruct.new(body: JSON.parse(body),
+                     url: "https://api.stage.datacite.org/reports/d4cccd37-9044-4c59-85d4-f2063ce361cd")
+    end
     let(:report) { Report.new(result) }
-    let(:args) {{ header: report.header, url:report.report_url }}
+    let(:args) { { header: report.header, url: report.report_url } }
     subject(:job) { UsageUpdateParseJob.perform_later(report.datasets, args) }
 
-    it 'queues the job' do
-      expect { job }.to have_enqueued_job(UsageUpdateParseJob)
-        .on_queue("test_levriero_usage")
+    it "queues the job" do
+      expect { job }.to have_enqueued_job(UsageUpdateParseJob).
+        on_queue("test_levriero_usage")
     end
 
-    it 'execute further call' do
+    it "executes a further call" do
       response = perform_enqueued_jobs do
-        UsageUpdateParseJob.new.perform(report.datasets,args)
+        UsageUpdateParseJob.new.perform(report.datasets, args)
       end
       expect(response).not_to be_a(Hash)
     end
   end
 
   context "not existing report" do
-    let(:item) { "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1-95cf-ba2f475cbfad" }
-    let(:body)   {File.read(fixture_path + 'usage_update_3.json')}
-    let(:result) {OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1-95cf-ba2f475cbfad"  )}
-    let(:report) {Report.new(result)}
-    let(:args) {{header: report.header, url:report.report_url}}
-    subject(:job) { UsageUpdateParseJob.perform_later(report.datasets,args) }
+    let(:item) do
+      "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1-95cf-ba2f475cbfad"
+    end
+    let(:body)   { File.read("#{fixture_path}usage_update_3.json") }
+    let(:result) do
+      OpenStruct.new(body: JSON.parse(body),
+                     url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1-95cf-ba2f475cbfad")
+    end
+    let(:report) { Report.new(result) }
+    let(:args) { { header: report.header, url: report.report_url } }
+    subject(:job) { UsageUpdateParseJob.perform_later(report.datasets, args) }
 
-    it 'queues the job' do
-      expect { job }.to have_enqueued_job(UsageUpdateParseJob)
-        .on_queue("test_levriero_usage")
+    it "queues the job" do
+      expect { job }.to have_enqueued_job(UsageUpdateParseJob).
+        on_queue("test_levriero_usage")
     end
 
-    it 'execute further call' do
+    it "executes a further call" do
       response = perform_enqueued_jobs do
         # UsageUpdateParseJob.new.perform(item, report.datasets)
       end
diff --git a/spec/lib/tasks/affiliation_identifier_rake_spec.rb b/spec/lib/tasks/affiliation_identifier_rake_spec.rb
index 0aed5aa0..607ce912 100644
--- a/spec/lib/tasks/affiliation_identifier_rake_spec.rb
+++ b/spec/lib/tasks/affiliation_identifier_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "affiliation_identifier:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an AffiliationIdentifierImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(AffiliationIdentifierImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 0 DOIs created from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 0 DOIs created from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an AffiliationIdentifierImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(0)
+    end.to change(enqueued_jobs, :size).by(0)
     expect(enqueued_jobs.last[:job]).to be(AffiliationIdentifierImportByMonthJob)
   end
-end
\ No newline at end of file
+end
diff --git a/spec/lib/tasks/crossref_rake_spec.rb b/spec/lib/tasks/crossref_rake_spec.rb
index 3ee4dd60..42e7d68a 100644
--- a/spec/lib/tasks/crossref_rake_spec.rb
+++ b/spec/lib/tasks/crossref_rake_spec.rb
@@ -7,7 +7,9 @@
   ENV["FROM_DATE"] = "2018-01-04"
   ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs updated from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs updated from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 36129 DOIs updated from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 36129 DOIs updated from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
diff --git a/spec/lib/tasks/funder_identifier_rake_spec.rb b/spec/lib/tasks/funder_identifier_rake_spec.rb
index 90d28bd8..2edead47 100644
--- a/spec/lib/tasks/funder_identifier_rake_spec.rb
+++ b/spec/lib/tasks/funder_identifier_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "funder_identifier:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an FunderIdentifierImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(FunderIdentifierImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 30 DOIs created from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 30 DOIs created from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an FunderIdentifierImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(25)
+    end.to change(enqueued_jobs, :size).by(25)
     expect(enqueued_jobs.last[:job]).to be(FunderIdentifierImportJob)
   end
-end
\ No newline at end of file
+end
diff --git a/spec/lib/tasks/name_identifier_rake_spec.rb b/spec/lib/tasks/name_identifier_rake_spec.rb
index cb92e8e0..7b22dd57 100644
--- a/spec/lib/tasks/name_identifier_rake_spec.rb
+++ b/spec/lib/tasks/name_identifier_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "name_identifier:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an NameIdentifierImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(NameIdentifierImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 48 DOIs created from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 48 DOIs created from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an NameIdentifierImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(25)
+    end.to change(enqueued_jobs, :size).by(25)
     expect(enqueued_jobs.last[:job]).to be(NameIdentifierImportJob)
   end
-end
\ No newline at end of file
+end
diff --git a/spec/lib/tasks/orcid_affiliation_rake_spec.rb b/spec/lib/tasks/orcid_affiliation_rake_spec.rb
index 8c5cae83..7bcb0e46 100644
--- a/spec/lib/tasks/orcid_affiliation_rake_spec.rb
+++ b/spec/lib/tasks/orcid_affiliation_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "orcid_affiliation:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an OrcidAffiliationImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(OrcidAffiliationImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 0 DOIs created from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 0 DOIs created from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an OrcidAffiliationImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(0)
+    end.to change(enqueued_jobs, :size).by(0)
     expect(enqueued_jobs.last[:job]).to be(OrcidAffiliationImportByMonthJob)
   end
 end
diff --git a/spec/lib/tasks/related_identifier_rake_spec.rb b/spec/lib/tasks/related_identifier_rake_spec.rb
index 1eef3497..54f0e893 100644
--- a/spec/lib/tasks/related_identifier_rake_spec.rb
+++ b/spec/lib/tasks/related_identifier_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "related_identifier:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs created from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an RelatedIdentifierImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(RelatedIdentifierImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 1798 DOIs created from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 1798 DOIs created from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an RelatedIdentifierImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(25)
+    end.to change(enqueued_jobs, :size).by(25)
     expect(enqueued_jobs.last[:job]).to be(RelatedIdentifierImportJob)
   end
-end
\ No newline at end of file
+end
diff --git a/spec/lib/tasks/related_url_rake_spec.rb b/spec/lib/tasks/related_url_rake_spec.rb
index 34731bd6..e324fedc 100644
--- a/spec/lib/tasks/related_url_rake_spec.rb
+++ b/spec/lib/tasks/related_url_rake_spec.rb
@@ -1,13 +1,15 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe "related_url:import_by_month", vcr: true do
   include ActiveJob::TestHelper
   include_context "rake"
 
-  ENV['FROM_DATE'] = "2018-01-04"
-  ENV['UNTIL_DATE'] = "2018-12-31"
+  ENV["FROM_DATE"] = "2018-01-04"
+  ENV["UNTIL_DATE"] = "2018-12-31"
 
-  let(:output) { "Queued import for DOIs updated from 2018-01-01 until 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for DOIs updated from 2018-01-01 until 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -18,9 +20,9 @@
   end
 
   it "should enqueue an RelatedUrlImportByMonthJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(12)
+    end.to change(enqueued_jobs, :size).by(12)
     expect(enqueued_jobs.last[:job]).to be(RelatedUrlImportByMonthJob)
   end
 end
@@ -29,7 +31,9 @@
   include ActiveJob::TestHelper
   include_context "rake"
 
-  let(:output) { "Queued import for 19 DOIs updated from 2018-01-04 - 2018-12-31.\n" }
+  let(:output) do
+    "Queued import for 19 DOIs updated from 2018-01-04 - 2018-12-31.\n"
+  end
 
   it "prerequisites should include environment" do
     expect(subject.prerequisites).to include("environment")
@@ -40,9 +44,9 @@
   end
 
   it "should enqueue an RelatedUrlImportJob" do
-    expect {
+    expect do
       capture_stdout { subject.invoke }
-    }.to change(enqueued_jobs, :size).by(19)
+    end.to change(enqueued_jobs, :size).by(19)
     expect(enqueued_jobs.last[:job]).to be(RelatedUrlImportJob)
   end
-end
\ No newline at end of file
+end
diff --git a/spec/models/affiliation_identifier_spec.rb b/spec/models/affiliation_identifier_spec.rb
index 60ad11e5..d549c81a 100644
--- a/spec/models/affiliation_identifier_spec.rb
+++ b/spec/models/affiliation_identifier_spec.rb
@@ -6,20 +6,23 @@
     let(:until_date) { "2019-07-19" }
 
     it "import_by_month" do
-      response = AffiliationIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+      response = AffiliationIdentifier.import_by_month(from_date: from_date,
+                                                       until_date: until_date)
       expect(response).to eq("Queued import for DOIs created from 2019-07-01 until 2019-07-31.")
     end
 
     it "import zero" do
       from_date = "2019-07-01"
       until_date = "2019-07-01"
-      response = AffiliationIdentifier.import(from_date: from_date, until_date: until_date)
+      response = AffiliationIdentifier.import(from_date: from_date,
+                                              until_date: until_date)
       expect(response).to eq(0)
     end
 
     it "import" do
       until_date = "2019-07-19"
-      response = AffiliationIdentifier.import(from_date: from_date, until_date: until_date)
+      response = AffiliationIdentifier.import(from_date: from_date,
+                                              until_date: until_date)
       expect(response).to eq(0)
     end
 
@@ -29,7 +32,8 @@
       expect(response["@id"]).to eq("https://ror.org/02catss52")
       expect(response["@type"]).to eq("Organization")
       expect(response["name"]).to eq("European Bioinformatics Institute")
-      expect(response["location"]).to eq("addressCountry" => "United Kingdom", "type" => "postalAddress")
+      expect(response["location"]).to eq("addressCountry" => "United Kingdom",
+                                         "type" => "postalAddress")
     end
 
     # it "push_item" do
diff --git a/spec/models/base_spec.rb b/spec/models/base_spec.rb
index 0e5061f2..7775f74a 100644
--- a/spec/models/base_spec.rb
+++ b/spec/models/base_spec.rb
@@ -1,18 +1,23 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe Base, type: :model, vcr: true do
   context "get_datacite_xml" do
     it "fetch metadata scholarly-article" do
       id = "https://doi.org/10.5438/mk65-3m12"
       response = Base.get_datacite_xml(id)
-      expect(response.dig("relatedIdentifiers", "relatedIdentifier").length).to eq(3)
-      expect(response.dig("relatedIdentifiers", "relatedIdentifier").last).to eq("__content__"=>"10.5438/55e5-t5c0", "relatedIdentifierType"=>"DOI", "relationType"=>"References")
+      expect(response.dig("relatedIdentifiers",
+                          "relatedIdentifier").length).to eq(3)
+      expect(response.dig("relatedIdentifiers",
+                          "relatedIdentifier").last).to eq("__content__" => "10.5438/55e5-t5c0",
+                                                           "relatedIdentifierType" => "DOI", "relationType" => "References")
     end
 
     it "fetch metadata dataset" do
       id = "https://doi.org/10.4124/ccvcn4z"
       response = Base.get_datacite_xml(id)
-      expect(response.dig("relatedIdentifiers", "relatedIdentifier")).to eq("__content__"=>"10.1021/ja906895j", "relatedIdentifierType"=>"DOI", "relationType"=>"IsSupplementTo")
+      expect(response.dig("relatedIdentifiers",
+                          "relatedIdentifier")).to eq("__content__" => "10.1021/ja906895j",
+                                                      "relatedIdentifierType" => "DOI", "relationType" => "IsSupplementTo")
     end
   end
 
@@ -21,13 +26,17 @@
       id = "https://doi.org/10.5438/mk65-3m12"
       response = Base.get_datacite_json(id)
       expect(response.fetch("relatedIdentifiers", []).length).to eq(3)
-      expect(response.fetch("relatedIdentifiers", []).last).to eq("relatedIdentifier"=>"10.5438/55e5-t5c0", "relatedIdentifierType"=>"DOI", "relationType"=>"References")
+      expect(response.fetch("relatedIdentifiers",
+                            []).last).to eq("relatedIdentifier" => "10.5438/55e5-t5c0",
+                                            "relatedIdentifierType" => "DOI", "relationType" => "References")
     end
 
     it "fetch metadata dataset" do
       id = "https://doi.org/10.4124/ccvcn4z"
       response = Base.get_datacite_json(id)
-      expect(response.fetch("relatedIdentifiers", [])).to eq([{"relatedIdentifier"=>"10.1021/ja906895j", "relatedIdentifierType"=>"DOI", "relationType"=>"IsSupplementTo"}])
+      expect(response.fetch("relatedIdentifiers",
+                            [])).to eq([{ "relatedIdentifier" => "10.1021/ja906895j",
+                                          "relatedIdentifierType" => "DOI", "relationType" => "IsSupplementTo" }])
     end
   end
 
@@ -57,7 +66,7 @@
       response = Base.get_datacite_metadata(id)
       expect(response["@id"]).to eq("https://doi.org/10.70112/d7svvt")
       expect(response["@type"]).to eq("Dataset")
-       expect(response["registrantId"]).to eq("datacite.inist.inra")
+      expect(response["registrantId"]).to eq("datacite.inist.inra")
       expect(response["proxyIdentifiers"]).to be_empty
       expect(response["datePublished"]).to eq("2018")
     end
@@ -86,7 +95,7 @@
   context "get_crossref_member_id" do
     it "fetch crossref member_id" do
       id = "10.1055/s-0030-1259729"
-      options ={}
+      options = {}
       response = Base.get_crossref_member_id(id, options)
       expect(response).to eq("crossref.194")
     end
diff --git a/spec/models/crossref_funder_spec.rb b/spec/models/crossref_funder_spec.rb
index cd31a267..ab4f7d9d 100644
--- a/spec/models/crossref_funder_spec.rb
+++ b/spec/models/crossref_funder_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-01-04" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date, rows: 0)
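+      # Builds a Crossref works query filtered to has-funder within the date window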
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date, rows: 0)
       expect(response).to eq("https://api.crossref.org/works?filter=has-funder%3Atrue%2Cfrom-created-date%3A2018-01-04%2Cuntil-created-date%3A2018-01-04&mailto=info%40datacite.org&rows=0")
     end
 
@@ -24,7 +25,8 @@
     end
 
     it "get_total in 2013" do
-      response = subject.get_total(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = subject.get_total(from_date: "2013-10-01",
+                                   until_date: "2013-10-31")
       expect(response).to eq(14304)
     end
   end
@@ -34,12 +36,14 @@
     let(:until_date) { "2018-01-04" }
 
     it "import_by_month" do
-      response = CrossrefFunder.import_by_month(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = CrossrefFunder.import_by_month(from_date: "2013-10-01",
+                                                until_date: "2013-10-31")
       expect(response).to eq("Queued import for DOIs created from 2013-10-01 until 2013-10-31.")
     end
 
     it "import" do
-      response = CrossrefFunder.import(from_date: from_date, until_date: until_date)
+      response = CrossrefFunder.import(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq(3352)
     end
 
diff --git a/spec/models/crossref_import_spec.rb b/spec/models/crossref_import_spec.rb
index b1c16bec..7b450e65 100644
--- a/spec/models/crossref_import_spec.rb
+++ b/spec/models/crossref_import_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-01-04" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date, rows: 0)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date, rows: 0)
       expect(response).to eq("https://api.crossref.org/works?filter=from-created-date%3A2018-01-04%2Cuntil-created-date%3A2018-01-04&mailto=info%40datacite.org&rows=0")
     end
 
@@ -24,7 +25,8 @@
     end
 
     it "get_total in 2013" do
-      response = subject.get_total(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = subject.get_total(from_date: "2013-10-01",
+                                   until_date: "2013-10-31")
       expect(response).to eq(663725)
     end
   end
@@ -34,12 +36,14 @@
     let(:until_date) { "2018-01-04" }
 
     it "import_by_month" do
-      response = CrossrefImport.import_by_month(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = CrossrefImport.import_by_month(from_date: "2013-10-01",
+                                                until_date: "2013-10-31")
       expect(response).to eq("Queued import for DOIs created from 2013-10-01 until 2013-10-31.")
     end
 
     it "import" do
-      response = CrossrefImport.import(from_date: from_date, until_date: until_date)
+      response = CrossrefImport.import(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq(17739)
     end
 
diff --git a/spec/models/crossref_orcid_spec.rb b/spec/models/crossref_orcid_spec.rb
index 6681b877..b3982ee2 100644
--- a/spec/models/crossref_orcid_spec.rb
+++ b/spec/models/crossref_orcid_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-01-04" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date, rows: 0)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date, rows: 0)
       expect(response).to eq("https://api.crossref.org/works?filter=has-orcid%3Atrue%2Cfrom-created-date%3A2018-01-04%2Cuntil-created-date%3A2018-01-04&mailto=info%40datacite.org&rows=0")
     end
 
@@ -24,7 +25,8 @@
     end
 
     it "get_total in 2013" do
-      response = subject.get_total(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = subject.get_total(from_date: "2013-10-01",
+                                   until_date: "2013-10-31")
       expect(response).to eq(4675)
     end
   end
@@ -34,12 +36,14 @@
     let(:until_date) { "2018-01-04" }
 
     it "import_by_month" do
-      response = CrossrefOrcid.import_by_month(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = CrossrefOrcid.import_by_month(from_date: "2013-10-01",
+                                               until_date: "2013-10-31")
       expect(response).to eq("Queued import for DOIs created from 2013-10-01 until 2013-10-31.")
     end
 
     it "import" do
-      response = CrossrefOrcid.import(from_date: from_date, until_date: until_date)
+      response = CrossrefOrcid.import(from_date: from_date,
+                                      until_date: until_date)
       expect(response).to eq(2826)
     end
 
diff --git a/spec/models/crossref_related_spec.rb b/spec/models/crossref_related_spec.rb
index 424525e8..35dc2113 100644
--- a/spec/models/crossref_related_spec.rb
+++ b/spec/models/crossref_related_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-01-04" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date, rows: 0)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date, rows: 0)
       expect(response).to eq("https://api.crossref.org/works?filter=reference-visibility%3Aopen%2Chas-references%3Atrue%2Cfrom-created-date%3A2018-01-04%2Cuntil-created-date%3A2018-01-04&mailto=info%40datacite.org&rows=0")
     end
 
@@ -24,7 +25,8 @@
     end
 
     it "get_total in 2013" do
-      response = subject.get_total(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = subject.get_total(from_date: "2013-10-01",
+                                   until_date: "2013-10-31")
       expect(response).to eq(251843)
     end
   end
@@ -34,12 +36,14 @@
     let(:until_date) { "2018-01-04" }
 
     it "import_by_month" do
-      response = CrossrefRelated.import_by_month(from_date: "2013-10-01", until_date: "2013-10-31")
+      response = CrossrefRelated.import_by_month(from_date: "2013-10-01",
+                                                 until_date: "2013-10-31")
       expect(response).to eq("Queued import for DOIs created from 2013-10-01 until 2013-10-31.")
     end
 
     it "import" do
-      response = CrossrefRelated.import(from_date: from_date, until_date: until_date)
+      response = CrossrefRelated.import(from_date: from_date,
+                                        until_date: until_date)
       expect(response).to eq(8122)
     end
 
diff --git a/spec/models/crossref_spec.rb b/spec/models/crossref_spec.rb
index 1ed2080d..d7deb748 100644
--- a/spec/models/crossref_spec.rb
+++ b/spec/models/crossref_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe Crossref, type: :model, vcr: true do
   context "import crossref events" do
@@ -6,7 +6,8 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = Crossref.import_by_month(from_date: from_date, until_date: until_date)
+      response = Crossref.import_by_month(from_date: from_date,
+                                          until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2018-08-31.")
     end
 
@@ -16,4 +17,4 @@
       expect(response).to eq(439)
     end
   end
-end
\ No newline at end of file
+end
diff --git a/spec/models/funder_identifier_spec.rb b/spec/models/funder_identifier_spec.rb
index 27eb8467..f16d40ba 100644
--- a/spec/models/funder_identifier_spec.rb
+++ b/spec/models/funder_identifier_spec.rb
@@ -6,13 +6,15 @@
     let(:until_date) { "2019-06-30" }
 
     it "import_by_month" do
-      response = FunderIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+      response = FunderIdentifier.import_by_month(from_date: from_date,
+                                                  until_date: until_date)
       expect(response).to eq("Queued import for DOIs created from 2018-01-01 until 2019-06-30.")
     end
 
     it "import" do
       until_date = "2019-06-30"
-      response = FunderIdentifier.import(from_date: from_date, until_date: until_date)
+      response = FunderIdentifier.import(from_date: from_date,
+                                         until_date: until_date)
       expect(response).to eq(31)
     end
 
diff --git a/spec/models/name_identifier_spec.rb b/spec/models/name_identifier_spec.rb
index e4f1ab1d..1f0624e0 100644
--- a/spec/models/name_identifier_spec.rb
+++ b/spec/models/name_identifier_spec.rb
@@ -6,13 +6,15 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = NameIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+      response = NameIdentifier.import_by_month(from_date: from_date,
+                                                until_date: until_date)
       expect(response).to eq("Queued import for DOIs created from 2018-01-01 until 2018-08-31.")
     end
 
     it "import" do
       until_date = "2018-01-31"
-      response = NameIdentifier.import(from_date: from_date, until_date: until_date)
+      response = NameIdentifier.import(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq(22)
     end
 
diff --git a/spec/models/orcid_affiliation_spec.rb b/spec/models/orcid_affiliation_spec.rb
index 817982a7..285644f7 100644
--- a/spec/models/orcid_affiliation_spec.rb
+++ b/spec/models/orcid_affiliation_spec.rb
@@ -6,20 +6,23 @@
     let(:until_date) { "2019-07-19" }
 
     it "import_by_month" do
-      response = OrcidAffiliation.import_by_month(from_date: from_date, until_date: until_date)
+      response = OrcidAffiliation.import_by_month(from_date: from_date,
+                                                  until_date: until_date)
       expect(response).to eq("Queued import for DOIs created from 2019-07-01 until 2019-07-31.")
     end
 
     it "import zero" do
       from_date = "2019-07-01"
       until_date = "2019-07-01"
-      response = OrcidAffiliation.import(from_date: from_date, until_date: until_date)
+      response = OrcidAffiliation.import(from_date: from_date,
+                                         until_date: until_date)
       expect(response).to eq(0)
     end
 
     it "import" do
       until_date = "2019-07-31"
-      response = OrcidAffiliation.import(from_date: from_date, until_date: until_date)
+      response = OrcidAffiliation.import(from_date: from_date,
+                                         until_date: until_date)
       expect(response).to eq(0)
     end
 
diff --git a/spec/models/related_arxiv_spec.rb b/spec/models/related_arxiv_spec.rb
index e9b72ca5..c6144aa8 100644
--- a/spec/models/related_arxiv_spec.rb
+++ b/spec/models/related_arxiv_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-08-05" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq("https://api.stage.datacite.org/dois?query=relatedIdentifiers.relatedIdentifierType%3AarXiv+AND+updated%3A%5B2018-01-04T00%3A00%3A00Z+TO+2018-08-05T23%3A59%3A59Z%5D&resource-type-id=&page%5Bnumber%5D=1&page%5Bsize%5D=1000&exclude_registration_agencies=true&affiliation=true")
     end
 
@@ -23,13 +24,15 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = RelatedArxiv.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedArxiv.import_by_month(from_date: from_date,
+                                              until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2018-08-31.")
     end
 
     it "import" do
       until_date = "2018-12-31"
-      response = RelatedArxiv.import(from_date: from_date, until_date: until_date)
+      response = RelatedArxiv.import(from_date: from_date,
+                                     until_date: until_date)
       expect(response).to eq(2)
     end
 
diff --git a/spec/models/related_handle_spec.rb b/spec/models/related_handle_spec.rb
index 133a277a..141ab5b9 100644
--- a/spec/models/related_handle_spec.rb
+++ b/spec/models/related_handle_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2020-08-05" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq("https://api.stage.datacite.org/dois?query=relatedIdentifiers.relatedIdentifierType%3AHandle+AND+updated%3A%5B2018-01-04T00%3A00%3A00Z+TO+2020-08-05T23%3A59%3A59Z%5D&resource-type-id=&page%5Bnumber%5D=1&page%5Bsize%5D=1000&exclude_registration_agencies=true&affiliation=true")
     end
 
@@ -23,13 +24,15 @@
     let(:until_date) { "2020-08-05" }
 
     it "import_by_month" do
-      response = RelatedHandle.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedHandle.import_by_month(from_date: from_date,
+                                               until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2020-08-31.")
     end
 
     it "import" do
       until_date = "2020-12-31"
-      response = RelatedHandle.import(from_date: from_date, until_date: until_date)
+      response = RelatedHandle.import(from_date: from_date,
+                                      until_date: until_date)
       expect(response).to eq(321)
     end
 
diff --git a/spec/models/related_identifier_spec.rb b/spec/models/related_identifier_spec.rb
index 791ce3b4..63a19529 100644
--- a/spec/models/related_identifier_spec.rb
+++ b/spec/models/related_identifier_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe RelatedIdentifier, type: :model, vcr: true do
   context "import related_identifiers" do
@@ -6,13 +6,15 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = RelatedIdentifier.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedIdentifier.import_by_month(from_date: from_date,
+                                                   until_date: until_date)
       expect(response).to eq("Queued import for DOIs created from 2018-01-01 until 2018-08-31.")
     end
 
     it "import" do
       until_date = "2018-01-31"
-      response = RelatedIdentifier.import(from_date: from_date, until_date: until_date)
+      response = RelatedIdentifier.import(from_date: from_date,
+                                          until_date: until_date)
       expect(response).to eq(97)
     end
 
@@ -20,7 +22,7 @@
       doi = "10.5061/dryad.j86rt6b"
       # attributes = RelatedIdentifier.get_datacite_json(doi)
       # response = RelatedIdentifier.push_item({ "id" => doi, "type" => "dois", "attributes" => attributes })
-      # expect(response).to eq(8) 
+      # expect(response).to eq(8)
     end
   end
 end
diff --git a/spec/models/related_igsn_spec.rb b/spec/models/related_igsn_spec.rb
index c5078708..d7ac529c 100644
--- a/spec/models/related_igsn_spec.rb
+++ b/spec/models/related_igsn_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-08-05" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq("https://api.stage.datacite.org/dois?query=relatedIdentifiers.relatedIdentifierType%3AIGSN+AND+updated%3A%5B2018-01-04T00%3A00%3A00Z+TO+2018-08-05T23%3A59%3A59Z%5D&resource-type-id=&page%5Bnumber%5D=1&page%5Bsize%5D=1000&exclude_registration_agencies=true&affiliation=true")
     end
 
@@ -23,13 +24,15 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = RelatedIgsn.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedIgsn.import_by_month(from_date: from_date,
+                                             until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2018-08-31.")
     end
 
     it "import" do
       until_date = "2018-12-31"
-      response = RelatedIgsn.import(from_date: from_date, until_date: until_date)
+      response = RelatedIgsn.import(from_date: from_date,
+                                    until_date: until_date)
       expect(response).to eq(1)
     end
 
diff --git a/spec/models/related_pmid_spec.rb b/spec/models/related_pmid_spec.rb
index 19172cbd..76ce6dc0 100644
--- a/spec/models/related_pmid_spec.rb
+++ b/spec/models/related_pmid_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe RelatedPmid, type: :model, vcr: true do
   context "instance methods" do
@@ -8,7 +8,8 @@
     let(:until_date) { "2020-08-05" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date)
       puts response
       expect(response).to eq("https://api.stage.datacite.org/dois?query=relatedIdentifiers.relatedIdentifierType%3APMID+AND+updated%3A%5B2018-01-04T00%3A00%3A00Z+TO+2020-08-05T23%3A59%3A59Z%5D&resource-type-id=&page%5Bnumber%5D=1&page%5Bsize%5D=1000&exclude_registration_agencies=true&affiliation=true")
     end
@@ -24,13 +25,15 @@
     let(:until_date) { "2020-08-05" }
 
     it "import_by_month" do
-      response = RelatedPmid.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedPmid.import_by_month(from_date: from_date,
+                                             until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2020-08-31.")
     end
 
     it "import" do
       until_date = "2020-12-31"
-      response = RelatedPmid.import(from_date: from_date, until_date: until_date)
+      response = RelatedPmid.import(from_date: from_date,
+                                    until_date: until_date)
       expect(response).to eq(594)
     end
 
diff --git a/spec/models/related_url_spec.rb b/spec/models/related_url_spec.rb
index 622a4748..9d22d88e 100644
--- a/spec/models/related_url_spec.rb
+++ b/spec/models/related_url_spec.rb
@@ -8,7 +8,8 @@
     let(:until_date) { "2018-08-05" }
 
     it "get_query_url" do
-      response = subject.get_query_url(from_date: from_date, until_date: until_date)
+      response = subject.get_query_url(from_date: from_date,
+                                       until_date: until_date)
       expect(response).to eq("https://api.stage.datacite.org/dois?query=relatedIdentifiers.relatedIdentifierType%3AURL+AND+updated%3A%5B2018-01-04T00%3A00%3A00Z+TO+2018-08-05T23%3A59%3A59Z%5D&resource-type-id=&page%5Bnumber%5D=1&page%5Bsize%5D=1000&exclude_registration_agencies=true&affiliation=true")
     end
 
@@ -23,7 +24,8 @@
     let(:until_date) { "2018-08-05" }
 
     it "import_by_month" do
-      response = RelatedUrl.import_by_month(from_date: from_date, until_date: until_date)
+      response = RelatedUrl.import_by_month(from_date: from_date,
+                                            until_date: until_date)
       expect(response).to eq("Queued import for DOIs updated from 2018-01-01 until 2018-08-31.")
     end
 
diff --git a/spec/models/report_spec.rb b/spec/models/report_spec.rb
index 1c9ffb59..85d14f59 100644
--- a/spec/models/report_spec.rb
+++ b/spec/models/report_spec.rb
@@ -1,10 +1,12 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe Report, type: :model, vcr: true do
-  let(:body)     {File.read(fixture_path + 'resolution_compress_small.json')}
-  let(:response) {OpenStruct.new(body:  JSON.parse(body) )}
-  let(:report)   {Report.new(response)}
-  let(:url) {"https://api.stage.datacite.org/reports/9e5461d8-0713-4abd-8e87-e4533a76ab3d"} #original for test
+  let(:body)     { File.read("#{fixture_path}resolution_compress_small.json") }
+  let(:response) { OpenStruct.new(body: JSON.parse(body)) }
+  let(:report)   { Report.new(response) }
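+  # original report URL for this test; an alternate report URL is kept commented below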
+  let(:url) do
+    "https://api.stage.datacite.org/reports/9e5461d8-0713-4abd-8e87-e4533a76ab3d"
+  end
   # let(:url) {"https://api.stage.datacite.org/reports/82022fc3-8b31-47f2-88a8-24814d9bd2f0"}
 
   # describe "parse_multi_subset_report" do
@@ -28,13 +30,20 @@
 
   describe "parse_normal_report" do
     context "when report is ok" do
-      let(:body)  {File.read(fixture_path + 'multi_subset_report.json')}
-      let(:uncompressed)  {File.read(fixture_path + 'datacite_resolution_report_2018-09.json')}
-      let(:result) {OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")}
-      let(:report) {Report.new(result)}
+      let(:body)  { File.read("#{fixture_path}multi_subset_report.json") }
+      let(:uncompressed) do
+        File.read("#{fixture_path}datacite_resolution_report_2018-09.json")
+      end
+      let(:result) do
+        OpenStruct.new(body: JSON.parse(body),
+                       url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
+      end
+      let(:report) { Report.new(result) }
 
       it "should parse it and give you two arrays that are in every gzip" do
-        live_results = Maremma.get("https://api.datacite.org/reports/21fd2e8e-5481-4bbd-b2ef-742d8b270a66", host: "https://api.datacite.org/")
+        live_results = Maremma.get(
+          "https://api.datacite.org/reports/21fd2e8e-5481-4bbd-b2ef-742d8b270a66", host: "https://api.datacite.org/"
+        )
         report = Report.new(live_results)
         rr = Report.parse_normal_report report
         expect(rr.size).to eq(0)
diff --git a/spec/models/usage_update_spec.rb b/spec/models/usage_update_spec.rb
index 8ab380e8..0c8f2ba2 100644
--- a/spec/models/usage_update_spec.rb
+++ b/spec/models/usage_update_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe UsageUpdate, type: :model, vcr: true do
   describe "get_data" do
@@ -13,15 +13,15 @@
 
     context "when there are NO messages" do
       it "should return empty" do
-        options ={}
-        message= ""
-        response = UsageUpdate.get_data(message,options)
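+        # An empty message should yield an error body rather than raising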
+        options = {}
+        message = ""
+        response = UsageUpdate.get_data(message, options)
         expect(response.body["errors"]).to eq("No Report given given")
       end
     end
   end
 
-  # describe "queue" do 
+  # describe "queue" do
   #   context "get_total" do
   #     it "when is working with AWS" do
   #       expect(subject.get_total()).to respond_to(:+)
@@ -39,11 +39,13 @@
   describe "parse_data" do
     context "when the usage event was NOT found" do
       it "should return errors" do
-        body = File.read(fixture_path + 'usage_update_nil.json')
-        result = OpenStruct.new(body:  JSON.parse(body) )
+        body = File.read("#{fixture_path}usage_update_nil.json")
+        result = OpenStruct.new(body: JSON.parse(body))
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
         expect(response).to be_a(Array)
         expect(response).to be_empty
 
@@ -61,11 +63,13 @@
 
     context "when the usage report was NOT found" do
       it "should return errors" do
-        body = File.read(fixture_path + 'usage_update_nil.json')
-        result = OpenStruct.new(body:  JSON.parse(body) )
+        body = File.read("#{fixture_path}usage_update_nil.json")
+        result = OpenStruct.new(body: JSON.parse(body))
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
 
         expect(response).to be_a(Array)
         expect(response).to be_empty
@@ -75,27 +79,41 @@
 
     context "when the report was found" do
       it "should parse it correctly" do
-        body = File.read(fixture_path + 'usage_update.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad"  )
-   
+        body = File.read("#{fixture_path}usage_update.json")
+        result = OpenStruct.new(body: JSON.parse(body),
+                                url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
+
         report = Report.new(result)
         args = { header: report.header, url: report.report_url }
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
-      
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
+
         expect(response.length).to eq(2)
-        expect(response.last.except("uuid")).to eq("subj"=>{"id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued"=>"2128-04-09"},"total"=>3,"message-action" => "create", "subj-id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "obj-id"=>"https://doi.org/10.7291/d1q94r", "relation-type-id"=>"unique-dataset-investigations-regular", "source-id"=>"datacite-usage", "occurred-at"=>"2013-11-02", "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "source-token" => ENV['DATACITE_USAGE_SOURCE_TOKEN'])
+        expect(response.last.except("uuid")).to eq(
+          "subj" => {
+            "id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued" => "2128-04-09"
+          }, "total" => 3, "message-action" => "create", "subj-id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "obj-id" => "https://doi.org/10.7291/d1q94r", "relation-type-id" => "unique-dataset-investigations-regular", "source-id" => "datacite-usage", "occurred-at" => "2013-11-02", "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "source-token" => ENV["DATACITE_USAGE_SOURCE_TOKEN"]
+        )
       end
 
       it "should parsed it correctly resolution" do
-        body = File.read(fixture_path + 'resolution_update.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad"  )
+        body = File.read("#{fixture_path}resolution_update.json")
+        result = OpenStruct.new(body: JSON.parse(body),
+                                url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
 
         report = Report.new(result)
         args = { header: report.header, url: report.report_url }
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
 
         expect(response.length).to eq(136)
-        expect(response.last.except("uuid")).to eq("subj"=>{"id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued"=>"2018-10-28"},"total"=>37,"message-action" => "create", "subj-id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "obj-id"=>"https://doi.org/10.6084/m9.figshare.6158567.v1", "relation-type-id"=>"total-resolutions-machine", "source-id"=>"datacite-resolution", "occurred-at"=>"2019-05-01", "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "source-token" => ENV['DATACITE_RESOLUTION_SOURCE_TOKEN'])
+        expect(response.last.except("uuid")).to eq(
+          "subj" => {
+            "id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued" => "2018-10-28"
+          }, "total" => 37, "message-action" => "create", "subj-id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "obj-id" => "https://doi.org/10.6084/m9.figshare.6158567.v1", "relation-type-id" => "total-resolutions-machine", "source-id" => "datacite-resolution", "occurred-at" => "2019-05-01", "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "source-token" => ENV["DATACITE_RESOLUTION_SOURCE_TOKEN"]
+        )
       end
 
       # it "should parsed it correctly from call" do
@@ -111,59 +129,73 @@
       # end
 
       it "should parsed it correctly from dataone with strange doi names" do
-        body = File.read(fixture_path + 'dataone.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/f0e06846-7af1-4e43-a32b-8d299e99bd21"  )
+        body = File.read("#{fixture_path}dataone.json")
+        result = OpenStruct.new(body: JSON.parse(body),
+                                url: "https://api.stage.datacite.org/reports/f0e06846-7af1-4e43-a32b-8d299e99bd21")
         # response = UsageUpdate.parse_data(result)
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
 
         expect(response.last.fetch("obj-id")).to eq("https://doi.org/10.5063/aa/bowdish.122.10")
         # expect(doi_instances.first.dig("total")).to eq(1083)
       end
 
       it "should parsed it correctly when it has five metrics  and two DOIs" do
-        body = File.read(fixture_path + 'usage_update_3.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad"  )
+        body = File.read("#{fixture_path}usage_update_3.json")
+        result = OpenStruct.new(body: JSON.parse(body),
+                                url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
         # response = UsageUpdate.parse_data(result)
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
         expect(response.length).to eq(5)
-        expect(response.last.except("uuid")).to eq("message-action"=>"create", "subj-id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "subj"=>{"id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued"=>"2128-04-09"}, "total"=>208, "obj-id"=>"https://doi.org/10.6071/z7wc73", "relation-type-id"=>"Unique-Dataset-Requests-Machine", "source-id"=>"datacite-usage", "source-token"=>ENV['DATACITE_USAGE_SOURCE_TOKEN'], "occurred-at"=>"2013-11-02", "license"=>"https://creativecommons.org/publicdomain/zero/1.0/")
+        expect(response.last.except("uuid")).to eq("message-action" => "create",
+                                                   "subj-id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "subj" => { "id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued" => "2128-04-09" }, "total" => 208, "obj-id" => "https://doi.org/10.6071/z7wc73", "relation-type-id" => "Unique-Dataset-Requests-Machine", "source-id" => "datacite-usage", "source-token" => ENV["DATACITE_USAGE_SOURCE_TOKEN"], "occurred-at" => "2013-11-02", "license" => "https://creativecommons.org/publicdomain/zero/1.0/")
       end
 
       it "should parsed it correctly when it has two metrics per DOI " do
-        body = File.read(fixture_path + 'usage_update_2.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
+        body = File.read("#{fixture_path}usage_update_2.json")
+        result = OpenStruct.new(body: JSON.parse(body), url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
         # response = UsageUpdate.parse_data(result)
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
-        
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
+
         expect(response.length).to eq(4)
-        expect(response.last.except("uuid")).to eq("message-action"=>"create", "subj-id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "subj"=>{"id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued"=>"2128-04-09"}, "total"=>208, "obj-id"=>"https://doi.org/10.6071/z7wc73", "relation-type-id"=>"Unique-Dataset-Requests-Machine", "source-id"=>"datacite-usage", "source-token"=>ENV['DATACITE_USAGE_SOURCE_TOKEN'], "occurred-at"=>"2013-11-02", "license"=>"https://creativecommons.org/publicdomain/zero/1.0/")
+        expect(response.last.except("uuid")).to eq("message-action" => "create",
+                                                   "subj-id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "subj" => { "id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad", "issued" => "2128-04-09" }, "total" => 208, "obj-id" => "https://doi.org/10.6071/z7wc73", "relation-type-id" => "Unique-Dataset-Requests-Machine", "source-id" => "datacite-usage", "source-token" => ENV["DATACITE_USAGE_SOURCE_TOKEN"], "occurred-at" => "2013-11-02", "license" => "https://creativecommons.org/publicdomain/zero/1.0/")
       end
 
       it "should send a warning if there are more than 4 metrics" do
-        body = File.read(fixture_path + 'usage_update_1.json')
-        result = OpenStruct.new(body: JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad"  )
+        body = File.read("#{fixture_path}usage_update_1.json")
+        result = OpenStruct.new(body: JSON.parse(body),
+                                url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
         # response = UsageUpdate.parse_data(result)
         report = Report.new(result)
-        args = {header: report.header, url:report.report_url}
-        response = Report.translate_datasets(result.body.dig("data","report","report-datasets"), args)
-        
+        args = { header: report.header, url: report.report_url }
+        response = Report.translate_datasets(
+          result.body.dig("data", "report", "report-datasets"), args
+        )
+
         expect(response.length).to eq(1)
         expect(response).to be_a(Array)
-        expect(response.last.body).to eq({"errors"=>"There are too many instances in 10.7291/D1Q94R for report https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad. There can only be 4"})
+        expect(response.last.body).to eq({ "errors" => "There are too many instances in 10.7291/D1Q94R for report https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad. There can only be 4" })
       end
     end
 
     describe "parse_data compressed" do
       context "when the usage event is ok" do
         it "should return report parsed" do
-          body = File.read(fixture_path + 'datacite_resolution_report_2018-09_encoded.json')
-          result = OpenStruct.new(body:  JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad" )
+          body = File.read("#{fixture_path}datacite_resolution_report_2018-09_encoded.json")
+          result = OpenStruct.new(body: JSON.parse(body),
+                                  url: "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
           # parsed = UsageUpdate.parse_data(result)
           expect(Report.new(result).compressed_report?).to be(true)
         end
@@ -185,7 +217,7 @@
     #       result = OpenStruct.new(body:  JSON.parse(body), url:"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad" )
     #       parsed = Report.new(result).parse_data
     #       # puts parsed
-            # checksum is wrong
+    # checksum is wrong
     #       expect(parsed).to be_a(Array)
     #       expect(parsed.size).to eq(95056)
     #     end
@@ -193,16 +225,18 @@
     # end
   end
 
-
   describe "wrap event" do
-    let(:options) {
-      {report_meta:{
-        report_id: "1a6e79ea-5291-4f5f-a25e-2cb071715bfc", 
-        created_by: 'datacite', 
-        reporting_period: ""}}
-    }
-    let(:item) {{"obj-id"=>"https://doi.org/10.14278/rodaretest.11", "total"=>45}}
-    
+    let(:options) do
+      { report_meta: {
+        report_id: "1a6e79ea-5291-4f5f-a25e-2cb071715bfc",
+        created_by: "datacite",
+        reporting_period: "",
+      } }
+    end
+    let(:item) do
+      { "obj-id" => "https://doi.org/10.14278/rodaretest.11", "total" => 45 }
+    end
+
     it "should format correctly" do
       # expect((UsageUpdate.wrap_event(item,options)).dig("data","attributes","obj")).to eq({"id"=>"https://doi.org/10.14278/rodaretest.11", "type"=>"dataset", "name"=>"Large Image", "author"=>[{"given_name"=>"Tester", "family_name"=>"Test"}], "publisher"=>"Rodare", "date_published"=>"2018-04-10", "date_modified"=>"2018-10-28T02:01:02.000Z", "registrant_id"=>"datacite.tib.hzdr"})
       # expect((UsageUpdate.wrap_event(item,options)).dig("data","attributes","total")).to eq(45)
@@ -210,12 +244,12 @@
   end
 
   context "push_data" do
-    let!(:events) {create_list(:event,10)}
+    let!(:events) { create_list(:event, 10) }
 
     it "should work with a single item" do
-      body = File.read(fixture_path + 'usage_events.json')
+      body = File.read("#{fixture_path}usage_events.json")
       result = JSON.parse(body).first.to_json
-      options = { }
+      options = {}
       # expect(UsageUpdate.push_item(result, options)).to eq(true)
     end
   end
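
The usage_update_spec.rb hunks above apply three recurring rewrites: single-quoted strings become double-quoted, fixture_path + '...' concatenation becomes interpolation, and hash literals gain inner spaces. A minimal before/after sketch of those shapes, with hypothetical values standing in for the spec's report data:

    fixture_path = "/app/spec/fixtures/" # hypothetical; the real helper is defined in spec/rails_helper.rb

    # before: File.read(fixture_path + 'usage_update.json')
    path = "#{fixture_path}usage_update.json"

    # before: args = {header: report.header, url:report.report_url}
    args = { header: "report-header", url: "https://example.org/report" } # placeholder values

    puts path, args.inspect
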
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 51414cf8..79b3c519 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1,11 +1,11 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe User, type: :model do
   describe "from token" do
     let(:token) { User.generate_token }
     let(:user) { User.new(token) }
 
-    describe 'User attributes' do
+    describe "User attributes" do
       it "has role_id" do
         expect(user.role_id).to eq("staff_admin")
       end
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index 05a18c2e..f5cdbb82 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -7,7 +7,7 @@
 
 require File.expand_path("../config/environment", __dir__)
 
-Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
+Dir[Rails.root.join("spec/support/**/*.rb")].sort.each { |f| require f }
 
 require "rspec/rails"
 require "shoulda-matchers"
@@ -31,7 +31,7 @@
 end
 
 def fixture_path
-  File.expand_path("fixtures", __dir__) + "/"
+  "#{File.expand_path('fixtures', __dir__)}/"
 end
 
 RSpec.configure do |config|
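
Two changes in spec/rails_helper.rb are worth a note: Dir[] returns glob matches in filesystem order, which varies across machines, so sorting before requiring makes the support-file load order deterministic; and fixture_path now appends its trailing slash via interpolation rather than +. A small standalone sketch of the sorting rationale (the directory name is illustrative):

    # Glob order is filesystem-dependent; sorting first makes the
    # support-file load order reproducible across machines.
    Dir[File.join(__dir__, "support", "**", "*.rb")].sort.each do |file|
      require file
    end
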
diff --git a/spec/support/shared_contexts/rake.rb b/spec/support/shared_contexts/rake.rb
index 4ef42dff..33af969c 100644
--- a/spec/support/shared_contexts/rake.rb
+++ b/spec/support/shared_contexts/rake.rb
@@ -5,12 +5,16 @@
   let(:rake)      { Rake::Application.new }
   let(:task_name) { self.class.top_level_description.split("[").first }
   let(:regexp)    { Regexp.new('\[([\w,]+)\]') }
-  let(:task_args) { regexp.match(self.class.top_level_description)[1].split(",") }
+  let(:task_args) do
+    regexp.match(self.class.top_level_description)[1].split(",")
+  end
   let(:task_path) { "lib/tasks/#{task_name.split(':').first}" }
   subject         { rake[task_name] }
 
   def loaded_files_excluding_current_rake_file
-    $LOADED_FEATURES.reject { |file| file == Rails.root.join("#{task_path}.rake").to_s }
+    $LOADED_FEATURES.reject do |file|
+      file == Rails.root.join("#{task_path}.rake").to_s
+    end
   end
 
   before do
@@ -19,4 +23,4 @@ def loaded_files_excluding_current_rake_file
     Levriero::Application.load_tasks
     Rake::Task.define_task(:environment)
   end
-end
\ No newline at end of file
+end
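
The rewrites in this shared context follow the usual RuboCop block convention: braces for single-line blocks, do...end once a block spans multiple lines. A generic sketch of that convention, independent of the rake specifics:

    numbers = [3, 1, 2]

    sorted  = numbers.sort { |a, b| a <=> b } # single line: braces

    doubled = numbers.map do |n|              # multi-line: do...end
      n * 2
    end

    puts sorted.inspect, doubled.inspect
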
diff --git a/spec/support/task_helper.rb b/spec/support/task_helper.rb
index 8a21e2f5..e4df16b7 100644
--- a/spec/support/task_helper.rb
+++ b/spec/support/task_helper.rb
@@ -15,7 +15,7 @@ module TaskExampleGroup
   extend ActiveSupport::Concern
 
   included do
-    let(:task_name) { self.class.top_level_description.sub(/\Arake /, "") }
+    let(:task_name) { self.class.top_level_description.delete_prefix("rake ") }
     let(:tasks) { Rake::Task }
 
     # Make the Rake task available as `task` in your examples:
@@ -23,11 +23,9 @@ module TaskExampleGroup
   end
 end
 
-
 RSpec.configure do |config|
-
   # Tag Rake specs with `:task` metadata or put them in the spec/tasks dir
-  config.define_derived_metadata(:file_path => %r{/spec/tasks/}) do |metadata|
+  config.define_derived_metadata(file_path: %r{/spec/tasks/}) do |metadata|
     metadata[:type] = :task
   end
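
The task_helper.rb hunk swaps a regexp-based sub for String#delete_prefix (available since Ruby 2.5), which states the intent directly and avoids compiling a regexp; the derived-metadata hash also moves from hash-rocket to keyword syntax for its symbol key. The two prefix-stripping forms are equivalent for this input:

    description = "rake import:dois" # hypothetical task description

    via_sub    = description.sub(/\Arake /, "")
    via_prefix = description.delete_prefix("rake ")

    raise "mismatch" unless via_sub == via_prefix # both yield "import:dois"
    puts via_prefix
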
 
diff --git a/spec/workers/doi_import_worker_spec.rb b/spec/workers/doi_import_worker_spec.rb
index 84d520b2..0bc10f56 100644
--- a/spec/workers/doi_import_worker_spec.rb
+++ b/spec/workers/doi_import_worker_spec.rb
@@ -1,17 +1,23 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe DoiImportWorker do
   context "related_identifier", vcr: true do
     let(:doi) { "10.17863/cam.12119" }
-    let(:data) { { "id" => doi, "type" => "dois", "attributes" => {"doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z","updated" => "2018-10-07T05:42:36.000Z"}}.to_json }
-    let(:sqs_msg) { double message_id: 'fc754df7-9cc2-4c41-96ca-5996a44b771e', body: data, delete: nil }
-    
+    let(:data) do
+      { "id" => doi, "type" => "dois",
+        "attributes" => { "doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z", "updated" => "2018-10-07T05:42:36.000Z" } }.to_json
+    end
+    let(:sqs_msg) do
+      double message_id: "fc754df7-9cc2-4c41-96ca-5996a44b771e", body: data,
+             delete: nil
+    end
+
     subject { DoiImportWorker.new }
 
-    it 'find related_identifier' do
+    it "find related_identifier" do
       # related_identifiers = subject.perform(sqs_msg, data)
       # expect(related_identifiers.length).to eq(1)
-      # expect(related_identifiers.first).to eq("affiliation" => [], 
+      # expect(related_identifiers.first).to eq("affiliation" => [],
       #   "familyName" => "Liu",
       #   "givenName" => "Yang",
       #   "name" => "Liu, Yang",
@@ -22,9 +28,15 @@
 
   context "name_identifier", vcr: true do
     let(:doi) { "10.17863/cam.9820" }
-    let(:data) { { "id" => doi, "type" => "dois", "attributes" => {"doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z","updated" => "2018-10-07T05:42:36.000Z"}}.to_json }
-    let(:sqs_msg) { double message_id: 'fc754df7-9cc2-4c41-96ca-5996a44b771e', body: data, delete: nil }
-    
+    let(:data) do
+      { "id" => doi, "type" => "dois",
+        "attributes" => { "doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z", "updated" => "2018-10-07T05:42:36.000Z" } }.to_json
+    end
+    let(:sqs_msg) do
+      double message_id: "fc754df7-9cc2-4c41-96ca-5996a44b771e", body: data,
+             delete: nil
+    end
+
     # subject { DoiImportWorker.new }
 
     # it 'find name_identifier' do
@@ -40,9 +52,15 @@
 
   context "funder_identifier", vcr: true do
     let(:doi) { "10.4224/crm.2010f.selm-1" }
-    let(:data) { { "id" => doi, "type" => "dois", "attributes" => {"doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z","updated" => "2018-10-07T05:42:36.000Z"}}.to_json }
-    let(:sqs_msg) { double message_id: 'fc754df7-9cc2-4c41-96ca-5996a44b771e', body: data, delete: nil }
-  
+    let(:data) do
+      { "id" => doi, "type" => "dois",
+        "attributes" => { "doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z", "updated" => "2018-10-07T05:42:36.000Z" } }.to_json
+    end
+    let(:sqs_msg) do
+      double message_id: "fc754df7-9cc2-4c41-96ca-5996a44b771e", body: data,
+             delete: nil
+    end
+
     # subject { DoiImportWorker.new }
 
     # it 'find funder_identifier' do
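
The sqs_msg doubles above stand in for the message object each worker receives as the first argument to perform(sqs_msg, data): something answering message_id, body and delete. A hypothetical plain-Ruby equivalent of the surface the double provides (the Struct is illustrative, not part of the codebase):

    # Illustrative only: an object with the same interface as the double.
    SqsMessage = Struct.new(:message_id, :body) do
      def delete; end # no-op, mirroring the stubbed delete: nil
    end

    msg = SqsMessage.new("fc754df7-9cc2-4c41-96ca-5996a44b771e", "{}")
    puts msg.message_id
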
diff --git a/spec/workers/usage_update_import_worker_spec.rb b/spec/workers/usage_update_import_worker_spec.rb
index 34bcc6bf..4bbd13ab 100644
--- a/spec/workers/usage_update_import_worker_spec.rb
+++ b/spec/workers/usage_update_import_worker_spec.rb
@@ -1,13 +1,18 @@
-require 'rails_helper'
+require "rails_helper"
 
 describe UsageUpdateImportWorker do
   context "usage_report", vcr: true do
-    let(:data) { {"report_id"=>"https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad"}.to_json }
-    let(:sqs_msg) { double message_id: 'fc754df7-9cc2-4c41-96ca-5996a44b771e', body: data, delete: nil }
-    
+    let(:data) do
+      { "report_id" => "https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad" }.to_json
+    end
+    let(:sqs_msg) do
+      double message_id: "fc754df7-9cc2-4c41-96ca-5996a44b771e", body: data,
+             delete: nil
+    end
+
     subject { UsageUpdateImportWorker.new }
 
-    it 'find usage reports' do
+    it "find usage reports" do
       parse_job = subject.perform(sqs_msg, data)
       expect(parse_job.arguments.first).to eq("https://api.stage.datacite.org/reports/5cac6ca0-9391-4e1d-95cf-ba2f475cbfad")
     end
@@ -17,7 +22,7 @@
   #   let(:doi) { "10.0133/32096" }
   #   let(:data) { { "id" => doi, "type" => "dois", "attributes" => {"doi" => doi, "state" => "findable", "created" => "2018-10-07T05:42:35.000Z","updated" => "2018-10-07T05:42:36.000Z"}}.to_json }
   #   let(:sqs_msg) { double message_id: 'fc754df7-9cc2-4c41-96ca-5996a44b771e', body: data, delete: nil }
-  
+
   #   it 'find funder_identifier' do
   #     funder_identifiers = subject.perform(sqs_msg, data)
   #     expect(funder_identifiers.length).to eq(1)