From 1e83869aa55503daf2ce60f1d3b71b3740776d21 Mon Sep 17 00:00:00 2001 From: Martin Fenner Date: Sun, 22 Nov 2020 08:03:43 +0100 Subject: [PATCH] rubocop linting #681 --- .rubocop.yml | 840 +++-------- Gemfile | 7 +- Gemfile.lock | 50 +- Rakefile | 2 + app/controllers/activities_controller.rb | 123 +- app/controllers/application_controller.rb | 95 +- app/controllers/client_prefixes_controller.rb | 203 ++- app/controllers/clients_controller.rb | 299 ++-- app/controllers/concerns/countable.rb | 250 +++- .../concerns/error_serializable.rb | 22 +- app/controllers/concerns/facetable.rb | 285 ++-- app/controllers/concerns/fieldable.rb | 2 + app/controllers/concerns/paginatable.rb | 25 +- app/controllers/data_centers_controller.rb | 140 +- app/controllers/datacite_dois_controller.rb | 1223 ++++++++++++----- app/controllers/events_controller.rb | 331 +++-- app/controllers/exports_controller.rb | 337 +++-- app/controllers/graphql_controller.rb | 46 +- app/controllers/heartbeat_controller.rb | 6 +- app/controllers/index_controller.rb | 48 +- app/controllers/media_controller.rb | 117 +- app/controllers/members_controller.rb | 134 +- app/controllers/metadata_controller.rb | 116 +- app/controllers/old_events_controller.rb | 305 ++-- app/controllers/organizations_controller.rb | 172 ++- app/controllers/prefixes_controller.rb | 191 ++- .../provider_prefixes_controller.rb | 206 ++- app/controllers/providers_controller.rb | 358 +++-- app/controllers/random_controller.rb | 2 + app/controllers/repositories_controller.rb | 312 +++-- .../repository_prefixes_controller.rb | 202 ++- app/controllers/resource_types_controller.rb | 12 +- app/controllers/sessions_controller.rb | 68 +- app/controllers/works_controller.rb | 200 ++- ...elasticsearch_model_response_connection.rb | 278 ++-- app/graphql/connections/hash_connection.rb | 105 +- app/graphql/elasticsearch_loader.rb | 4 +- app/graphql/lupo_schema.rb | 18 +- app/graphql/mutations/base_mutation.rb | 2 + 
app/graphql/mutations/create_claim.rb | 77 +- app/graphql/mutations/delete_claim.rb | 33 +- app/graphql/resolvers/base.rb | 5 +- app/graphql/resolvers/claims.rb | 30 +- app/graphql/types/actor_item.rb | 8 +- app/graphql/types/address_type.rb | 6 +- .../audiovisual_connection_with_total_type.rb | 4 +- app/graphql/types/base_connection.rb | 81 +- app/graphql/types/base_enum.rb | 3 +- app/graphql/types/base_input_object.rb | 3 +- app/graphql/types/base_object.rb | 23 +- app/graphql/types/base_scalar.rb | 3 +- app/graphql/types/base_union.rb | 3 +- ...book_chapter_connection_with_total_type.rb | 4 +- .../types/book_connection_with_total_type.rb | 4 +- app/graphql/types/claim_type.rb | 12 +- .../collection_connection_with_total_type.rb | 4 +- ...erence_paper_connection_with_total_type.rb | 4 +- app/graphql/types/container_type.rb | 10 +- app/graphql/types/contributor_type.rb | 20 +- app/graphql/types/creator_type.rb | 16 +- ...data_catalog_connection_with_total_type.rb | 4 +- app/graphql/types/data_catalog_type.rb | 128 +- ...agement_plan_connection_with_total_type.rb | 4 +- .../data_paper_connection_with_total_type.rb | 4 +- .../dataset_connection_with_total_type.rb | 53 +- app/graphql/types/dataset_type.rb | 16 +- app/graphql/types/date_type.rb | 8 +- app/graphql/types/defined_term_type.rb | 14 +- app/graphql/types/description_type.rb | 4 +- ...dissertation_connection_with_total_type.rb | 4 +- app/graphql/types/doi_item.rb | 530 +++++-- app/graphql/types/employment_type.rb | 20 +- .../types/event_connection_with_total_type.rb | 4 +- app/graphql/types/event_data_edge.rb | 19 +- app/graphql/types/event_data_edge_type.rb | 12 +- .../funder_connection_with_total_type.rb | 9 +- app/graphql/types/funder_type.rb | 105 +- app/graphql/types/funding_type.rb | 28 +- app/graphql/types/geolocation_box_type.rb | 24 +- app/graphql/types/geolocation_point_type.rb | 12 +- app/graphql/types/geolocation_type.rb | 18 +- app/graphql/types/identifier_type.rb | 16 +- 
.../types/image_connection_with_total_type.rb | 4 +- .../instrument_connection_with_total_type.rb | 4 +- ...ive_resource_connection_with_total_type.rb | 4 +- ...rnal_article_connection_with_total_type.rb | 4 +- app/graphql/types/member_prefix_type.rb | 10 +- app/graphql/types/member_type.rb | 200 ++- .../types/model_connection_with_total_type.rb | 4 +- ...organization_connection_with_total_type.rb | 27 +- app/graphql/types/organization_type.rb | 193 ++- .../types/other_connection_with_total_type.rb | 4 +- .../person_connection_with_total_type.rb | 62 +- app/graphql/types/person_type.rb | 110 +- ...sical_object_connection_with_total_type.rb | 4 +- app/graphql/types/prefix_type.rb | 6 +- .../preprint_connection_with_total_type.rb | 4 +- .../publication_connection_with_total_type.rb | 51 +- app/graphql/types/query_type.rb | 12 +- app/graphql/types/related_identifier_type.rb | 36 +- app/graphql/types/repository_prefix_type.rb | 12 +- app/graphql/types/repository_type.rb | 153 ++- app/graphql/types/resource_type_type.rb | 14 +- app/graphql/types/rights_type.rb | 28 +- .../service_connection_with_total_type.rb | 4 +- .../types/software_application_type.rb | 8 +- .../software_connection_with_total_type.rb | 49 +- .../types/sound_connection_with_total_type.rb | 4 +- app/graphql/types/subject_type.rb | 25 +- app/graphql/types/title_type.rb | 4 +- app/graphql/types/usage_report_type.rb | 30 +- .../types/work_connection_with_total_type.rb | 7 +- .../workflow_connection_with_total_type.rb | 4 +- app/graphql/types/year_month_total_type.rb | 4 +- app/helpers/application_helper.rb | 5 +- .../activity_convert_affiliation_by_id_job.rb | 5 +- app/jobs/activity_import_by_id_job.rb | 5 +- app/jobs/affiliation_job.rb | 27 +- app/jobs/application_job.rb | 5 +- .../camelcase_nested_objects_by_id_job.rb | 2 + app/jobs/datacite_doi_import_by_id_job.rb | 5 +- app/jobs/delete_event_by_attribute_job.rb | 2 + app/jobs/delete_job.rb | 2 + app/jobs/doi_convert_affiliation_by_id_job.rb | 5 +- 
app/jobs/doi_convert_container_by_id_job.rb | 5 +- app/jobs/doi_job.rb | 2 + app/jobs/doi_refresh_job.rb | 5 +- app/jobs/event_import_by_id_job.rb | 2 + app/jobs/event_registrant_update_by_id_job.rb | 61 +- app/jobs/event_registrant_update_job.rb | 2 + app/jobs/handle_job.rb | 5 +- app/jobs/import_doi_job.rb | 2 + app/jobs/index_background_job.rb | 5 +- app/jobs/index_job.rb | 5 +- app/jobs/loop_through_dois_job.rb | 2 + app/jobs/orcid_auto_update_by_id_job.rb | 43 +- app/jobs/orcid_auto_update_job.rb | 2 + app/jobs/other_doi_by_id_job.rb | 5 +- app/jobs/other_doi_import_by_id_job.rb | 5 +- app/jobs/other_doi_job.rb | 2 + app/jobs/other_doi_refresh_job.rb | 5 +- app/jobs/schema_version_job.rb | 17 +- app/jobs/subj_check_by_id_job.rb | 2 + app/jobs/subj_check_job.rb | 2 + app/jobs/target_doi_by_id_job.rb | 5 +- app/jobs/transfer_client_job.rb | 9 +- app/jobs/transfer_job.rb | 15 +- app/jobs/update_doi_job.rb | 8 +- app/jobs/update_provider_id_job.rb | 8 +- app/jobs/update_state_job.rb | 9 +- app/jobs/url_job.rb | 31 +- app/models/ability.rb | 183 ++- app/models/activity.rb | 205 ++- app/models/application_record.rb | 2 + app/models/client.rb | 693 ++++++---- app/models/client_prefix.rb | 97 +- app/models/concerns/authenticable.rb | 106 +- app/models/concerns/authorable.rb | 64 +- app/models/concerns/batch_loader_helper.rb | 2 + app/models/concerns/cacheable.rb | 47 +- app/models/concerns/crosscitable.rb | 75 +- app/models/concerns/dateable.rb | 23 +- app/models/concerns/helpable.rb | 202 ++- app/models/concerns/identifiable.rb | 33 +- app/models/concerns/indexable.rb | 826 +++++++---- app/models/concerns/mailable.rb | 179 ++- app/models/concerns/metadatable.rb | 11 +- app/models/concerns/modelable.rb | 11 +- app/models/concerns/passwordable.rb | 10 +- app/models/concerns/processable.rb | 2 + app/models/concerns/searchable.rb | 6 +- app/models/concerns/userable.rb | 16 +- app/models/concerns/wikidatable.rb | 82 +- app/models/data_catalog.rb | 21 +- 
app/models/datacite_doi.rb | 82 +- app/models/doi.rb | 222 ++- app/models/event.rb | 606 +++++--- app/models/funder.rb | 35 +- app/models/handle.rb | 22 +- app/models/heartbeat.rb | 2 + app/models/media.rb | 11 +- app/models/metadata.rb | 23 +- app/models/organization.rb | 90 +- app/models/other_doi.rb | 94 +- app/models/person.rb | 147 +- app/models/phrase.rb | 2 + app/models/prefix.rb | 79 +- app/models/provider.rb | 486 +++++-- app/models/provider_prefix.rb | 102 +- app/models/resource_type.rb | 96 +- app/models/usage_report.rb | 24 +- app/models/user.rb | 101 +- app/serializers/activity_serializer.rb | 10 +- app/serializers/client_prefix_serializer.rb | 2 + app/serializers/client_serializer.rb | 50 +- app/serializers/data_center_serializer.rb | 10 +- app/serializers/datacite_doi_serializer.rb | 211 ++- app/serializers/download_serializer.rb | 13 +- app/serializers/event_serializer.rb | 16 +- app/serializers/media_serializer.rb | 2 + app/serializers/member_serializer.rb | 18 +- app/serializers/metadata_serializer.rb | 2 + app/serializers/object_serializer.rb | 15 +- app/serializers/old_event_serializer.rb | 13 +- app/serializers/old_object_serializer.rb | 14 +- app/serializers/prefix_serializer.rb | 2 + app/serializers/provider_prefix_serializer.rb | 2 + app/serializers/provider_serializer.rb | 152 +- .../repository_prefix_serializer.rb | 5 +- app/serializers/repository_serializer.rb | 55 +- app/serializers/resource_type_serializer.rb | 2 + app/serializers/view_serializer.rb | 13 +- app/serializers/work_serializer.rb | 44 +- .../billing_information_validator.rb | 6 +- app/validators/contact_validator.rb | 5 +- app/validators/xml_schema_validator.rb | 25 +- bin/bundle | 2 + bin/rails | 2 + bin/rake | 2 + bin/rspec | 2 + bin/setup | 2 + bin/spring | 1 + bin/update | 2 + config.ru | 2 + config/application.rb | 73 +- config/boot.rb | 2 + config/environment.rb | 2 + config/environments/development.rb | 2 + config/environments/production.rb | 4 +- 
config/environments/stage.rb | 2 + config/environments/test.rb | 4 +- config/initializers/_shoryuken.rb | 4 +- config/initializers/_token.rb | 2 +- config/initializers/backtrace_silencers.rb | 1 + config/initializers/constants.rb | 57 +- config/initializers/cors.rb | 8 +- config/initializers/elasticsearch.rb | 43 +- .../initializers/filter_parameter_logging.rb | 2 +- config/initializers/flipper.rb | 10 +- config/initializers/inflections.rb | 2 +- config/initializers/mime_types.rb | 55 +- .../new_framework_defaults_5_2.rb | 1 + config/initializers/paperclip.rb | 2 + config/initializers/sentry.rb | 10 +- config/initializers/turnout.rb | 5 +- config/initializers/wrap_parameters.rb | 4 +- config/routes.rb | 180 ++- config/spring.rb | 2 + .../20170807091814_create_all_tables.rb | 66 +- ...916141643_add_allocator_prefixes_column.rb | 2 +- db/migrate/20170926083943_add_url_index.rb | 2 +- .../20170928202815_addre3data_column.rb | 2 +- db/migrate/20171109120529_add_aasm_column.rb | 2 +- .../20171202002420_rename_state_column.rb | 4 +- .../20171202090754_change_url_column_type.rb | 2 +- db/migrate/20180116230054_add_client_url.rb | 2 +- db/migrate/20180306172317_add_json_column.rb | 2 +- ...20180310064742_landing_page_url_as_text.rb | 18 +- ...80330040550_add_institution_type_column.rb | 6 +- ...te_active_storage_tables.active_storage.rb | 23 +- .../20180505084805_remove_crosscite_column.rb | 2 +- .../20180731090122_add_source_column.rb | 2 +- ..._last_landing_page_status_result_column.rb | 5 +- ...1015152049_microseconds_in_time_columns.rb | 13 +- db/migrate/20181023235649_add_focus_area.rb | 8 +- .../20181102094810_add_schema_attributes.rb | 2 +- .../20181216071910_schema_version_index.rb | 2 +- db/migrate/20190302161113_install_audited.rb | 8 +- ...0409211358_change_media_url_column_type.rb | 2 +- db/migrate/20190604093226_add_events_table.rb | 23 +- .../20190727170040_add_client_fields.rb | 4 +- db/migrate/20200122153731_add_globus_uuid.rb | 10 +- 
.../20200131180609_add_events_properties.rb | 8 +- ...191027_add_attachment_logo_to_providers.rb | 2 + .../20200313163242_rename_prefix_tables.rb | 6 +- db/migrate/20200826173254_add_agency_index.rb | 6 +- .../20201019125327_change_domains_column.rb | 4 +- db/schema.rb | 203 ++- db/seeds/development/base.seeds.rb | 32 +- .../development/consortium_transfer.seeds.rb | 32 +- .../development/researcher_profile.seeds.rb | 23 +- spec/concerns/authenticable_spec.rb | 206 ++- spec/concerns/countable_spec.rb | 113 +- spec/concerns/crosscitable_spec.rb | 472 +++++-- spec/concerns/facetable_spec.rb | 1 + spec/concerns/helpable_spec.rb | 172 ++- spec/concerns/indexable_spec.rb | 18 +- spec/concerns/mailable_spec.rb | 79 +- spec/concerns/modelable_spec.rb | 2 + spec/concerns/paginatable_spec.rb | 21 +- spec/concerns/passwordable_spec.rb | 2 + spec/concerns/wikidatable_spec.rb | 75 +- spec/controllers/clients_controller_spec.rb | 1 + spec/controllers/dois_controller_spec.rb | 1 + spec/controllers/media_controller_spec.rb | 1 + spec/controllers/metadata_controller_spec.rb | 1 + spec/controllers/prefixes_controller_spec.rb | 1 + spec/controllers/providers_controller_spec.rb | 1 + spec/factories/default.rb | 103 +- spec/graphql/printout_spec.rb | 2 + spec/graphql/requests/me_type_spec.rb | 1 + spec/graphql/types/actor_item_spec.rb | 52 +- spec/graphql/types/address_type_spec.rb | 2 + spec/graphql/types/audiovisual_type_spec.rb | 2 + spec/graphql/types/book_chapter_type_spec.rb | 72 +- spec/graphql/types/book_type_spec.rb | 66 +- spec/graphql/types/collection_type_spec.rb | 2 + spec/graphql/types/conference_paper_type.spec | 2 + spec/graphql/types/contributor_type_spec.rb | 2 + spec/graphql/types/country_type_spec.rb | 2 + spec/graphql/types/creator_type_spec.rb | 2 + spec/graphql/types/data_catalog_type_spec.rb | 71 +- .../types/data_management_plan_type_spec.rb | 471 +++++-- spec/graphql/types/dataset_type_spec.rb | 443 ++++-- spec/graphql/types/date_type_spec.rb | 2 + 
spec/graphql/types/defined_term_type_spec.rb | 2 + spec/graphql/types/description_type_spec.rb | 2 + spec/graphql/types/dissertation_type_spec.rb | 245 +++- spec/graphql/types/doi_item_spec.rb | 36 +- spec/graphql/types/employment_type_spec.rb | 2 + spec/graphql/types/event_type_spec.rb | 2 + spec/graphql/types/facet_type_spec.rb | 2 + spec/graphql/types/funder_type_spec.rb | 145 +- spec/graphql/types/funding_type_spec.rb | 2 + spec/graphql/types/identifier_type_spec.rb | 2 + spec/graphql/types/image_type_spec.rb | 2 + spec/graphql/types/instrument_type_spec.rb | 21 +- .../types/interactive_resource_type_spec.rb | 2 + spec/graphql/types/issn_type_spec.rb | 2 + .../types/journal_article_type_spec.rb | 72 +- spec/graphql/types/label_type_spec.rb | 2 + spec/graphql/types/language_type_spec.rb | 2 + spec/graphql/types/me_type_spec.rb | 17 +- spec/graphql/types/member_prefix_type_spec.rb | 2 + spec/graphql/types/member_type_spec.rb | 121 +- spec/graphql/types/model_type_spec.rb | 2 + spec/graphql/types/organization_type_spec.rb | 718 +++++++--- spec/graphql/types/peer_review_type_spec.rb | 68 +- spec/graphql/types/person_type_spec.rb | 293 +++- .../types/physical_object_type_spec.rb | 2 + spec/graphql/types/prefix_type_spec.rb | 2 + spec/graphql/types/preprint_type_spec.rb | 97 +- spec/graphql/types/publication_type_spec.rb | 2 + spec/graphql/types/query_type_spec.rb | 132 +- .../types/registration_agency_type_spec.rb | 2 + .../types/repository_prefix_type_spec.rb | 2 + spec/graphql/types/repository_type_spec.rb | 141 +- spec/graphql/types/service_type_spec.rb | 114 +- spec/graphql/types/software_type_spec.rb | 28 +- spec/graphql/types/sound_type_spec.rb | 2 + spec/graphql/types/title_type_spec.rb | 2 + spec/graphql/types/work_type_spec.rb | 644 ++++++--- spec/graphql/types/workflow_type_spec.rb | 2 + spec/jobs/activity_import_by_id_job_spec.rb | 7 +- .../datacite_doi_import_by_id_job_spec.rb | 7 +- spec/jobs/event_import_by_id_job_spec.rb | 7 +- 
.../event_registrant_update_by_id_job_spec.rb | 7 +- spec/jobs/handle_job_spec.rb | 5 +- spec/jobs/import_doi_job_spec.rb | 9 +- spec/jobs/index_background_job_spec.rb | 9 +- spec/jobs/index_job_spec.rb | 7 +- spec/jobs/orcid_auto_update_by_id_job_spec.rb | 1 + spec/jobs/other_doi_by_id_job.rb | 9 +- spec/jobs/other_doi_import_by_id_job_spec.rb | 7 +- spec/jobs/target_doi_by_id_job_spec.rb | 9 +- spec/jobs/update_doi_job_spec.rb | 9 +- spec/jobs/update_state_job_spec.rb | 7 +- spec/jobs/url_job_spec.rb | 5 +- spec/lib/tasks/datacite_doi_rake_spec.rb | 2 + spec/lib/tasks/doi_rake_spec.rb | 45 +- spec/lib/tasks/other_doi_rake_spec.rb | 2 + spec/models/ability_spec.rb | 51 +- spec/models/activity_spec.rb | 27 +- spec/models/client_prefix_spec.rb | 13 +- spec/models/client_spec.rb | 123 +- spec/models/data_catalog_spec.rb | 22 +- spec/models/datacite_doi_spec.rb | 19 +- spec/models/doi_spec.rb | 2 + spec/models/event_spec.rb | 150 +- spec/models/funder_spec.rb | 32 +- spec/models/handler_spec.rb | 1 + spec/models/media_spec.rb | 2 + spec/models/metadata_spec.rb | 2 + spec/models/organization_spec.rb | 191 ++- spec/models/other_doi_spec.rb | 6 +- spec/models/person_spec.rb | 150 +- spec/models/prefix_spec.rb | 5 +- spec/models/provider_prefix_spec.rb | 2 + spec/models/provider_spec.rb | 110 +- spec/models/usage_report_spec.rb | 23 +- spec/models/user_spec.rb | 42 +- spec/rails_helper.rb | 17 +- spec/requests/activities_spec.rb | 116 +- spec/requests/client_prefixes_spec.rb | 65 +- spec/requests/clients_spec.rb | 275 ++-- spec/requests/datacite_dois_spec.rb | 2 + spec/requests/events_spec.rb | 709 +++++++--- spec/requests/exports_spec.rb | 166 ++- spec/requests/heartbeat_spec.rb | 2 + spec/requests/index_spec.rb | 106 +- spec/requests/media_spec.rb | 140 +- spec/requests/members_spec.rb | 7 +- spec/requests/metadata_spec.rb | 112 +- spec/requests/old_events_spec.rb | 665 ++++++--- spec/requests/prefixes_spec.rb | 39 +- spec/requests/provider_prefixes_spec.rb | 103 +- 
spec/requests/providers_spec.rb | 929 ++++++++----- spec/requests/random_spec.rb | 9 +- spec/requests/repositories_spec.rb | 399 ++++-- spec/requests/repository_prefixes_spec.rb | 91 +- spec/requests/sessions_spec.rb | 57 +- spec/requests/works_spec.rb | 108 +- spec/routing/clients_routing_spec.rb | 2 + spec/routing/dois_routing_spec.rb | 2 + spec/routing/media_routing_spec.rb | 32 +- spec/routing/metadata_routing_spec.rb | 32 +- spec/routing/prefixes_routing_spec.rb | 2 + spec/routing/providers_routing_spec.rb | 2 + spec/routing/repositories_routing_spec.rb | 12 +- spec/support/database_cleaner_helper.rb | 18 +- spec/support/elasticsearch_helper.rb | 31 +- spec/support/job_helper.rb | 2 + spec/support/request_helper.rb | 2 + spec/support/shared_contexts/rake.rb | 16 +- spec/support/shared_examples_for_sti.rb | 2 + spec/support/task_helper.rb | 8 +- 426 files changed, 19219 insertions(+), 8352 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 142ddf9c3..d4480de13 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,658 +1,276 @@ -# from https://raw.githubusercontent.com/thoughtbot/guides/master/style/ruby/.rubocop.yml -# modification: exclude "vendor/bundle/**/*" +require: + - rubocop-packaging + - rubocop-performance + - rubocop-rails + - rubocop-rspec AllCops: + TargetRubyVersion: 2.6 + # RuboCop has a bunch of cops enabled by default. This setting tells RuboCop + # to ignore them, so only the ones explicitly set in this file are enabled. + DisabledByDefault: true + Exclude: + - '**/tmp/**/*' + - '**/templates/**/*' + - '**/vendor/**/*' + - 'actionpack/lib/action_dispatch/journey/parser.rb' + - 'actionmailbox/test/dummy/**/*' + - 'actiontext/test/dummy/**/*' + - '**/node_modules/**/*' + - 'vendor/bundle/**/*' + +Performance: Exclude: - - db/schema.rb - - vendor/bundle/**/* + - '**/test/**/*' -require: - - rubocop-rails - - rubocop-performance +# Prefer assert_not over assert ! 
+Rails/AssertNot: + Include: + - '**/test/**/*' -Naming/AccessorMethodName: - Description: Check the naming of accessor methods for get_/set_. - Enabled: false +# Prefer assert_not_x over refute_x +Rails/RefuteMethods: + Include: + - '**/test/**/*' -Style/Alias: - Description: 'Use alias_method instead of alias.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#alias-method' - Enabled: false +Rails/IndexBy: + Enabled: true -Style/ArrayJoin: - Description: 'Use Array#join instead of Array#*.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#array-join' - Enabled: false +Rails/IndexWith: + Enabled: true -Style/AsciiComments: - Description: 'Use only ascii symbols in comments.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#english-comments' - Enabled: false +# Prefer &&/|| over and/or. +Style/AndOr: + Enabled: true -Naming/AsciiIdentifiers: - Description: 'Use only ascii symbols in identifiers.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#english-identifiers' - Enabled: false +# Align `when` with `case`. +Layout/CaseIndentation: + Enabled: true -Style/Attr: - Description: 'Checks for uses of Module#attr.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#attr' - Enabled: false +Layout/ClosingHeredocIndentation: + Enabled: true -Metrics/BlockNesting: - Description: 'Avoid excessive block nesting' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#three-is-the-number-thou-shalt-count' - Enabled: false +# Align comments with method definitions. +Layout/CommentIndentation: + Enabled: true -Style/CaseEquality: - Description: 'Avoid explicit use of the case equality operator(===).' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-case-equality' - Enabled: false +Layout/ElseAlignment: + Enabled: true -Style/CharacterLiteral: - Description: 'Checks for uses of character literals.' 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-character-literals' - Enabled: false +# Align `end` with the matching keyword or starting expression except for +# assignments, where it should be aligned with the LHS. +Layout/EndAlignment: + Enabled: true + EnforcedStyleAlignWith: variable + AutoCorrect: true -Style/ClassAndModuleChildren: - Description: 'Checks style of children classes and modules.' +Layout/EmptyLineAfterMagicComment: Enabled: true - EnforcedStyle: nested -Metrics/ClassLength: - Description: 'Avoid classes longer than 100 lines of code.' - Enabled: false +Layout/EmptyLinesAroundAccessModifier: + Enabled: true + EnforcedStyle: only_before -Metrics/ModuleLength: - Description: 'Avoid modules longer than 100 lines of code.' - Enabled: false +Layout/EmptyLinesAroundBlockBody: + Enabled: true -Style/ClassVars: - Description: 'Avoid the use of class variables.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-class-vars' - Enabled: false +# In a regular class definition, no empty lines around the body. +Layout/EmptyLinesAroundClassBody: + Enabled: true -Style/CollectionMethods: +# In a regular method definition, no empty lines around the body. +Layout/EmptyLinesAroundMethodBody: Enabled: true - PreferredMethods: - find: detect - inject: reduce - collect: map - find_all: select -Style/ColonMethodCall: - Description: 'Do not use :: for method call.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#double-colons' - Enabled: false - -Style/CommentAnnotation: - Description: >- - Checks formatting of special comments - (TODO, FIXME, OPTIMIZE, HACK, REVIEW). - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#annotate-keywords' - Enabled: false - -Metrics/AbcSize: - Description: >- - A calculated magnitude based on number of assignments, - branches, and conditions. - Enabled: false - -Metrics/BlockLength: - CountComments: true # count full line comments? 
- Max: 25 - ExcludedMethods: [] - Exclude: - - "spec/**/*" - -Metrics/CyclomaticComplexity: - Description: >- - A complexity metric that is strongly correlated to the number - of test cases needed to validate a method. - Enabled: false - -Rails/Delegate: - Description: 'Prefer delegate method for delegations.' - Enabled: false - -Style/PreferredHashMethods: - Description: 'Checks use of `has_key?` and `has_value?` Hash methods.' - StyleGuide: '#hash-key' - Enabled: false - -Style/Documentation: - Description: 'Document classes and non-namespace modules.' - Enabled: false - -Style/DoubleNegation: - Description: 'Checks for uses of double negation (!!).' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-bang-bang' - Enabled: false - -Style/EachWithObject: - Description: 'Prefer `each_with_object` over `inject` or `reduce`.' - Enabled: false - -Style/EmptyLiteral: - Description: 'Prefer literals to Array.new/Hash.new/String.new.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#literal-array-hash' - Enabled: false - -# Checks whether the source file has a utf-8 encoding comment or not -# AutoCorrectEncodingComment must match the regex -# /#.*coding\s?[:=]\s?(?:UTF|utf)-8/ -Style/Encoding: - Enabled: false - -Style/EvenOdd: - Description: 'Favor the use of Fixnum#even? && Fixnum#odd?' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#predicate-methods' - Enabled: false - -Naming/FileName: - Description: 'Use snake_case for source file names.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#snake-case-files' - Enabled: false +# In a regular module definition, no empty lines around the body. +Layout/EmptyLinesAroundModuleBody: + Enabled: true + +# Use Ruby >= 1.9 syntax for hashes. Prefer { a: :b } over { :a => :b }. +Style/HashSyntax: + Enabled: true + +Layout/FirstArgumentIndentation: + Enabled: true + +# Method definitions after `private` or `protected` isolated calls need one +# extra level of indentation. 
+Layout/IndentationConsistency: + Enabled: true + EnforcedStyle: indented_internal_methods + +# Two spaces, no tabs (for indentation). +Layout/IndentationWidth: + Enabled: true + +Layout/LeadingCommentSpace: + Enabled: true + +Layout/SpaceAfterColon: + Enabled: true + +Layout/SpaceAfterComma: + Enabled: true + +Layout/SpaceAfterSemicolon: + Enabled: true + +Layout/SpaceAroundEqualsInParameterDefault: + Enabled: true + +Layout/SpaceAroundKeyword: + Enabled: true + +Layout/SpaceAroundOperators: + Enabled: true + +Layout/SpaceBeforeComma: + Enabled: true + +Layout/SpaceBeforeComment: + Enabled: true + +Layout/SpaceBeforeFirstArg: + Enabled: true + +Style/DefWithParentheses: + Enabled: true + +# Defining a method with parameters needs parentheses. +Style/MethodDefParentheses: + Enabled: true Style/FrozenStringLiteralComment: - Description: >- - Add the frozen_string_literal comment to the top of files - to help transition from Ruby 2.3.0 to Ruby 3.0. - Enabled: false - -Lint/FlipFlop: - Description: 'Checks for flip flops' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-flip-flops' - Enabled: false - -Style/FormatString: - Description: 'Enforce the use of Kernel#sprintf, Kernel#format or String#%.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#sprintf' - Enabled: false - -Style/GlobalVars: - Description: 'Do not introduce global variables.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#instance-vars' - Reference: 'http://www.zenspider.com/Languages/Ruby/QuickRef.html' - Enabled: false - -Style/GuardClause: - Description: 'Check for conditionals that can be replaced with guard clauses' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-nested-conditionals' - Enabled: false - -Style/IfUnlessModifier: - Description: >- - Favor modifier if/unless usage when you have a - single-line body. 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#if-as-a-modifier' - Enabled: false - -Style/IfWithSemicolon: - Description: 'Do not use if x; .... Use the ternary operator instead.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-semicolon-ifs' - Enabled: false - -Style/InlineComment: - Description: 'Avoid inline comments.' - Enabled: false - -Style/Lambda: - Description: 'Use the new lambda literal syntax for single-line blocks.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#lambda-multi-line' - Enabled: false - -Style/LambdaCall: - Description: 'Use lambda.call(...) instead of lambda.(...).' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#proc-call' - Enabled: false - -Style/LineEndConcatenation: - Description: >- - Use \ instead of + or << to concatenate two string literals at - line end. - Enabled: false - -Metrics/LineLength: - Description: 'Limit lines to 80 characters.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#80-character-limits' - Max: 80 - -Metrics/MethodLength: - Description: 'Avoid methods longer than 10 lines of code.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#short-methods' - Enabled: false - -Style/ModuleFunction: - Description: 'Checks for usage of `extend self` in modules.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#module-function' - Enabled: false - -Style/MultilineBlockChain: - Description: 'Avoid multi-line chains of blocks.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#single-line-blocks' - Enabled: false - -Style/NegatedIf: - Description: >- - Favor unless over if for negative conditions - (or control flow or). - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#unless-for-negatives' - Enabled: false - -Style/NegatedWhile: - Description: 'Favor until over while for negative conditions.' 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#until-for-negatives' - Enabled: false - -Style/Next: - Description: 'Use `next` to skip iteration instead of a condition at the end.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-nested-conditionals' - Enabled: false - -Style/NilComparison: - Description: 'Prefer x.nil? to x == nil.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#predicate-methods' - Enabled: false - -Style/Not: - Description: 'Use ! instead of not.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#bang-not-not' - Enabled: false - -Style/NumericLiterals: - Description: >- - Add underscores to large numeric literals to improve their - readability. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#underscores-in-numerics' - Enabled: false - -Style/OneLineConditional: - Description: >- - Favor the ternary operator(?:) over - if/then/else/end constructs. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#ternary-operator' - Enabled: false - -Naming/BinaryOperatorParameterName: - Description: 'When defining binary operators, name the argument other.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#other-arg' - Enabled: false - -Metrics/ParameterLists: - Description: 'Avoid parameter lists longer than three or four parameters.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#too-many-params' - Enabled: false - -Style/PercentLiteralDelimiters: - Description: 'Use `%`-literal delimiters consistently' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#percent-literal-braces' - Enabled: false - -Style/PerlBackrefs: - Description: 'Avoid Perl-style regex back references.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-perl-regexp-last-matchers' - Enabled: false - -Naming/PredicateName: - Description: 'Check the names of predicate methods.' 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#bool-methods-qmark' - ForbiddenPrefixes: - - is_ + Enabled: true + EnforcedStyle: always Exclude: - - spec/**/* - -Style/Proc: - Description: 'Use proc instead of Proc.new.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#proc' - Enabled: false - -Style/RaiseArgs: - Description: 'Checks the arguments passed to raise/fail.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#exception-class-messages' - Enabled: false - -Style/RegexpLiteral: - Description: 'Use / or %r around regular expressions.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#percent-r' - Enabled: false - -Style/Sample: - Description: >- - Use `sample` instead of `shuffle.first`, - `shuffle.last`, and `shuffle[Fixnum]`. - Reference: 'https://github.com/JuanitoFatas/fast-ruby#arrayshufflefirst-vs-arraysample-code' - Enabled: false - -Style/SelfAssignment: - Description: >- - Checks for places where self-assignment shorthand should have - been used. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#self-assignment' - Enabled: false - -Style/SingleLineBlockParams: - Description: 'Enforces the names of some block params.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#reduce-blocks' - Enabled: false - -Style/SingleLineMethods: - Description: 'Avoid single-line methods.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-single-line-methods' - Enabled: false - -Style/SignalException: - Description: 'Checks for proper usage of fail and raise.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#fail-method' - Enabled: false - -Style/SpecialGlobalVars: - Description: 'Avoid Perl-style global variables.' 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-cryptic-perlisms' - Enabled: false + - 'actionview/test/**/*.builder' + - 'actionview/test/**/*.ruby' + - 'actionpack/test/**/*.builder' + - 'actionpack/test/**/*.ruby' + - 'activestorage/db/migrate/**/*.rb' + - 'activestorage/db/update_migrate/**/*.rb' + - 'actionmailbox/db/migrate/**/*.rb' + - 'actiontext/db/migrate/**/*.rb' + +Style/RedundantFreeze: + Enabled: true +# Use `foo {}` not `foo{}`. +Layout/SpaceBeforeBlockBraces: + Enabled: true + +# Use `foo { bar }` not `foo {bar}`. +Layout/SpaceInsideBlockBraces: + Enabled: true + EnforcedStyleForEmptyBraces: space + +# Use `{ a: 1 }` not `{a:1}`. +Layout/SpaceInsideHashLiteralBraces: + Enabled: true + +Layout/SpaceInsideParens: + Enabled: true + +# Check quotes usage according to lint rule below. Style/StringLiterals: - Description: 'Checks if uses of quotes match the configured preference.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#consistent-string-literals' - EnforcedStyle: double_quotes Enabled: true + EnforcedStyle: double_quotes -Style/TrailingCommaInArguments: - Description: 'Checks for trailing comma in argument lists.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-trailing-array-commas' - EnforcedStyleForMultiline: comma - SupportedStylesForMultiline: - - comma - - consistent_comma - - no_comma +# Detect hard tabs, no hard tabs. +Layout/IndentationStyle: Enabled: true -Style/TrailingCommaInArrayLiteral: - Description: 'Checks for trailing comma in array literals.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-trailing-array-commas' - EnforcedStyleForMultiline: comma - SupportedStylesForMultiline: - - comma - - consistent_comma - - no_comma +# Empty lines should not have any spaces. +Layout/TrailingEmptyLines: Enabled: true -Style/TrailingCommaInHashLiteral: - Description: 'Checks for trailing comma in hash literals.' 
- StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-trailing-array-commas' - EnforcedStyleForMultiline: comma - SupportedStylesForMultiline: - - comma - - consistent_comma - - no_comma +# No trailing whitespace. +Layout/TrailingWhitespace: Enabled: true -Style/TrivialAccessors: - Description: 'Prefer attr_* methods to trivial readers/writers.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#attr_family' - Enabled: false - -Style/VariableInterpolation: - Description: >- - Don't interpolate global, instance and class variables - directly in strings. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#curlies-interpolate' - Enabled: false - -Style/WhenThen: - Description: 'Use when x then ... for one-line cases.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#one-line-cases' - Enabled: false - -Style/WhileUntilModifier: - Description: >- - Favor modifier while/until usage when you have a - single-line body. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#while-as-a-modifier' - Enabled: false - -Style/WordArray: - Description: 'Use %w or %W for arrays of words.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#percent-w' - Enabled: false - -# Layout - -Layout/ParameterAlignment: - Description: 'Here we check if the parameters on a multi-line method call or definition are aligned.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-double-indent' - Enabled: false - -Layout/ConditionPosition: - Description: >- - Checks for condition placed in a confusing position relative to - the keyword. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#same-line-condition' - Enabled: false - -Layout/DotPosition: - Description: 'Checks the position of the dot in multi-line method calls.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#consistent-multi-line-chains' - EnforcedStyle: trailing - -Layout/ExtraSpacing: - Description: 'Do not use unnecessary spacing.' 
- Enabled: true - -Layout/MultilineOperationIndentation: - Description: >- - Checks indentation of binary operations that span more than - one line. - Enabled: true - EnforcedStyle: indented - -Layout/MultilineMethodCallIndentation: - Description: >- - Checks indentation of method calls with the dot operator - that span more than one line. - Enabled: true - EnforcedStyle: indented - -Layout/InitialIndentation: - Description: >- - Checks the indentation of the first non-blank non-comment line in a file. - Enabled: false - -# Lint +# Use quotes for string literals when they are enough. +Style/RedundantPercentQ: + Enabled: true Lint/AmbiguousOperator: - Description: >- - Checks for ambiguous operators in the first argument of a - method invocation without parentheses. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#parens-as-args' - Enabled: false + Enabled: true Lint/AmbiguousRegexpLiteral: - Description: >- - Checks for ambiguous regexp literals in the first argument of - a method invocation without parenthesis. - Enabled: false + Enabled: true -Lint/AssignmentInCondition: - Description: "Don't use assignment in conditions." - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#safe-assignment-in-condition' - Enabled: false +Lint/ErbNewArguments: + Enabled: true -Lint/CircularArgumentReference: - Description: "Don't refer to the keyword argument in the default value." - Enabled: false +# Use my_method(my_arg) not my_method( my_arg ) or my_method my_arg. +Lint/RequireParentheses: + Enabled: true + +Lint/ShadowingOuterLocalVariable: + Enabled: true + +Lint/RedundantStringCoercion: + Enabled: true + +Lint/UriEscapeUnescape: + Enabled: true + +Lint/UselessAssignment: + Enabled: true Lint/DeprecatedClassMethods: - Description: 'Check for deprecated class method calls.' - Enabled: false - -Lint/DuplicateHashKey: - Description: 'Check for duplicate keys in hash literals.' 
- Enabled: false - -Lint/EachWithObjectArgument: - Description: 'Check for immutable argument given to each_with_object.' - Enabled: false - -Lint/ElseLayout: - Description: 'Check for odd code arrangement in an else block.' - Enabled: false - -Lint/FormatParameterMismatch: - Description: 'The number of parameters to format/sprint must match the fields.' - Enabled: false - -Lint/SuppressedException: - Description: "Don't suppress exception." - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#dont-hide-exceptions' - Enabled: false - -Lint/LiteralAsCondition: - Description: 'Checks of literals used in conditions.' - Enabled: false - -Lint/LiteralInInterpolation: - Description: 'Checks for literals used in interpolation.' - Enabled: false - -Lint/Loop: - Description: >- - Use Kernel#loop with break rather than begin/end/until or - begin/end/while for post-loop tests. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#loop-with-break' - Enabled: false - -Lint/NestedMethodDefinition: - Description: 'Do not use nested method definitions.' - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-nested-methods' - Enabled: false - -Lint/NonLocalExitFromIterator: - Description: 'Do not use return in iterator to cause non-local exit.' - Enabled: false - -Lint/ParenthesesAsGroupedExpression: - Description: >- - Checks for method calls with a space before the opening - parenthesis. - StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#parens-no-spaces' - Enabled: false + Enabled: true -Lint/RequireParentheses: - Description: >- - Use parentheses in the method call to avoid confusion - about precedence. - Enabled: false - -Lint/UnderscorePrefixedVariableName: - Description: 'Do not use prefix `_` for a variable that is used.' - Enabled: false - -Lint/RedundantCopDisableDirective: - Description: >- - Checks for rubocop:disable comments that can be removed. - Note: this cop is not disabled when disabling all cops. - It must be explicitly disabled. 
- Enabled: false - -Lint/Void: - Description: 'Possible use of operator/literal/variable in void context.' - Enabled: false - -# Performance - -Performance/CaseWhenSplat: - Description: >- - Place `when` conditions that use splat at the end - of the list of `when` branches. - Enabled: false - -Performance/Count: - Description: >- - Use `count` instead of `select...size`, `reject...size`, - `select...count`, `reject...count`, `select...length`, - and `reject...length`. - Enabled: false - -Performance/Detect: - Description: >- - Use `detect` instead of `select.first`, `find_all.first`, - `select.last`, and `find_all.last`. - Reference: 'https://github.com/JuanitoFatas/fast-ruby#enumerabledetect-vs-enumerableselectfirst-code' - Enabled: false +Style/ParenthesesAroundCondition: + Enabled: true + +Style/HashTransformKeys: + Enabled: true + +Style/HashTransformValues: + Enabled: true + +Style/RedundantBegin: + Enabled: true + +Style/RedundantReturn: + Enabled: true + AllowMultipleReturnValues: true + +Style/Semicolon: + Enabled: true + AllowAsExpressionSeparator: true + +# Prefer Foo.method over Foo::method +Style/ColonMethodCall: + Enabled: true + +Style/TrivialAccessors: + Enabled: true Performance/FlatMap: - Description: >- - Use `Enumerable#flat_map` - instead of `Enumerable#map...Array#flatten(1)` - or `Enumberable#collect..Array#flatten(1)` - Reference: 'https://github.com/JuanitoFatas/fast-ruby#enumerablemaparrayflatten-vs-enumerableflat_map-code' - Enabled: false + Enabled: true + +Performance/RedundantMerge: + Enabled: true + +Performance/StartWith: + Enabled: true + +Performance/EndWith: + Enabled: true + +Performance/RegexpMatch: + Enabled: true Performance/ReverseEach: - Description: 'Use `reverse_each` instead of `reverse.each`.' 
- Reference: 'https://github.com/JuanitoFatas/fast-ruby#enumerablereverseeach-vs-enumerablereverse_each-code' - Enabled: false - -Performance/Size: - Description: >- - Use `size` instead of `count` for counting - the number of elements in `Array` and `Hash`. - Reference: 'https://github.com/JuanitoFatas/fast-ruby#arraycount-vs-arraysize-code' - Enabled: false - -Performance/StringReplacement: - Description: >- - Use `tr` instead of `gsub` when you are replacing the same - number of characters. Use `delete` instead of `gsub` when - you are deleting characters. - Reference: 'https://github.com/JuanitoFatas/fast-ruby#stringgsub-vs-stringtr-code' - Enabled: false - -# Rails - -Rails/ActionFilter: - Description: 'Enforces consistent use of action filter methods.' - Enabled: false - -Rails/Date: - Description: >- - Checks the correct usage of date aware methods, - such as Date.today, Date.current etc. - Enabled: false - -Rails/FindBy: - Description: 'Prefer find_by over where.first.' - Enabled: false - -Rails/FindEach: - Description: 'Prefer all.find_each over all.find.' - Enabled: false - -Rails/HasAndBelongsToMany: - Description: 'Prefer has_many :through to has_and_belongs_to_many.' - Enabled: false - -Rails/Output: - Description: 'Checks for calls to puts, print, etc.' - Enabled: false - -Rails/ReadWriteAttribute: - Description: >- - Checks for read_attribute(:attr) and - write_attribute(:attr, val). - Enabled: false - -Rails/ScopeArgs: - Description: 'Checks the arguments of ActiveRecord scopes.' - Enabled: false - -Rails/TimeZone: - Description: 'Checks the correct usage of time zone aware methods.' - StyleGuide: 'https://github.com/bbatsov/rails-style-guide#time' - Reference: 'http://danilenko.org/2012/7/6/rails_timezones' - Enabled: false - -Rails/Validation: - Description: 'Use validates :attribute, hash of validations.' 
- Enabled: false \ No newline at end of file + Enabled: true + +Performance/UnfreezeString: + Enabled: true + +Performance/DeletePrefix: + Enabled: true + +Performance/DeleteSuffix: + Enabled: true diff --git a/Gemfile b/Gemfile index dfcde9f24..2dd37a8c6 100644 --- a/Gemfile +++ b/Gemfile @@ -80,13 +80,14 @@ group :development, :test do gem "binding_of_caller" gem "bullet", "~> 6.1" gem "byebug", platforms: %i[mri mingw x64_mingw] - gem "prettier", "~> 0.13.0" gem "rspec-benchmark", "~> 0.4.0" gem "rspec-graphql_matchers", "~> 1.1" gem "rspec-rails", "~> 3.8", ">= 3.8.2" - gem "rubocop", "~> 0.77.0" + gem "rubocop", "~> 1.3", ">= 1.3.1" gem "rubocop-performance", "~> 1.5", ">= 1.5.1" - gem "rubocop-rails", "~> 2.4" + gem "rubocop-rails", "~> 2.8", ">= 2.8.1" + gem "rubocop-packaging", "~> 0.5.1" + gem "rubocop-rspec", "~> 2.0", require: false end group :development do diff --git a/Gemfile.lock b/Gemfile.lock index 88e04fba6..e49c66f07 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -60,8 +60,8 @@ GEM audited (4.9.0) activerecord (>= 4.2, < 6.1) aws-eventstream (1.1.0) - aws-partitions (1.393.0) - aws-sdk-core (3.109.2) + aws-partitions (1.397.0) + aws-sdk-core (3.109.3) aws-eventstream (~> 1, >= 1.0.2) aws-partitions (~> 1, >= 1.239.0) aws-sigv4 (~> 1.1) @@ -69,7 +69,7 @@ GEM aws-sdk-kms (1.39.0) aws-sdk-core (~> 3, >= 3.109.0) aws-sigv4 (~> 1.1) - aws-sdk-s3 (1.84.1) + aws-sdk-s3 (1.85.0) aws-sdk-core (~> 3, >= 3.109.0) aws-sdk-kms (~> 1) aws-sigv4 (~> 1.1) @@ -272,11 +272,10 @@ GEM i18n_data (0.8.0) iso-639 (0.3.5) iso8601 (0.9.1) - jaro_winkler (1.5.4) jmespath (1.4.0) json (2.3.1) json-canonicalization (0.2.0) - json-ld (3.1.4) + json-ld (3.1.5) htmlentities (~> 4.3) json-canonicalization (~> 0.2) link_header (~> 0.0, >= 0.0.8) @@ -382,7 +381,6 @@ GEM addressable css_parser (>= 1.6.0) htmlentities (>= 4.0.0) - prettier (0.13.0) promise.rb (0.7.4) public_suffix (2.0.5) pwqgen.rb (0.1.0) @@ -428,7 +426,7 @@ GEM rb-inotify (0.10.1) ffi (~> 1.0) rchardet (1.8.0) - 
rdf (3.1.6) + rdf (3.1.7) hamster (~> 3.0) link_header (~> 0.0, >= 0.0.8) rdf-aggregate-repo (3.1.0) @@ -459,6 +457,7 @@ GEM http-cookie (>= 1.0.2, < 2.0) mime-types (>= 1.16, < 4.0) netrc (~> 0.8) + rexml (3.2.4) rspec (3.9.0) rspec-core (~> 3.9.0) rspec-expectations (~> 3.9.0) @@ -486,19 +485,29 @@ GEM rspec-mocks (~> 3.9.0) rspec-support (~> 3.9.0) rspec-support (3.9.4) - rubocop (0.77.0) - jaro_winkler (~> 1.5.1) + rubocop (1.3.1) parallel (~> 1.10) - parser (>= 2.6) + parser (>= 2.7.1.5) rainbow (>= 2.2.2, < 4.0) + regexp_parser (>= 1.8) + rexml + rubocop-ast (>= 1.1.1) ruby-progressbar (~> 1.7) - unicode-display_width (>= 1.4.0, < 1.7) - rubocop-performance (1.6.1) - rubocop (>= 0.71.0) - rubocop-rails (2.5.2) - activesupport + unicode-display_width (>= 1.4.0, < 2.0) + rubocop-ast (1.1.1) + parser (>= 2.7.1.5) + rubocop-packaging (0.5.1) + rubocop (>= 0.89, < 2.0) + rubocop-performance (1.9.0) + rubocop (>= 0.90.0, < 2.0) + rubocop-ast (>= 0.4.0) + rubocop-rails (2.8.1) + activesupport (>= 4.2.0) rack (>= 1.1) - rubocop (>= 0.72.0) + rubocop (>= 0.87.0) + rubocop-rspec (2.0.0) + rubocop (~> 1.0) + rubocop-ast (>= 1.1.0) ruby-enum (0.8.0) i18n ruby-progressbar (1.10.1) @@ -574,7 +583,7 @@ GEM unf (0.1.4) unf_ext unf_ext (0.0.7.7) - unicode-display_width (1.6.1) + unicode-display_width (1.7.0) unicode_utils (1.4.0) uniform_notifier (1.13.0) uuid (2.3.9) @@ -662,7 +671,6 @@ DEPENDENCIES oj (>= 2.8.3) oj_mimic_json (~> 1.0, >= 1.0.1) premailer (~> 1.11, >= 1.11.1) - prettier (~> 0.13.0) pwqgen.rb (~> 0.1.0) rack-cors (~> 1.0) rack-utf8_sanitizer (~> 1.6) @@ -671,9 +679,11 @@ DEPENDENCIES rspec-benchmark (~> 0.4.0) rspec-graphql_matchers (~> 1.1) rspec-rails (~> 3.8, >= 3.8.2) - rubocop (~> 0.77.0) + rubocop (~> 1.3, >= 1.3.1) + rubocop-packaging (~> 0.5.1) rubocop-performance (~> 1.5, >= 1.5.1) - rubocop-rails (~> 2.4) + rubocop-rails (~> 2.8, >= 2.8.1) + rubocop-rspec (~> 2.0) seedbank sentry-raven (~> 2.9) shoryuken (~> 4.0) diff --git a/Rakefile b/Rakefile 
index 9a5ea7383..d2a78aa25 100644 --- a/Rakefile +++ b/Rakefile @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Add your own tasks in files placed in lib/tasks ending in .rake, # for example lib/tasks/capistrano.rake, and they will automatically be available to Rake. diff --git a/app/controllers/activities_controller.rb b/app/controllers/activities_controller.rb index 9ade89485..4873d0a85 100644 --- a/app/controllers/activities_controller.rb +++ b/app/controllers/activities_controller.rb @@ -1,28 +1,40 @@ +# frozen_string_literal: true + class ActivitiesController < ApplicationController include Countable - before_action :set_activity, only: [:show] + before_action :set_activity, only: %i[show] def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { created: { order: "desc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { created: { order: "desc" } } + end page = page_from_params(params) - if params[:id].present? - response = Activity.find_by(id: params[:id]) + response = if params[:id].present? + Activity.find_by(id: params[:id]) elsif params[:ids].present? 
- response = Activity.find_by_id(params[:ids], page: page, sort: sort) + Activity.find_by_id(params[:ids], page: page, sort: sort) else - response = Activity.query(params[:query], - uid: params[:datacite_doi_id] || params[:provider_id] || params[:client_id] || params[:repository_id], - page: page, - sort: sort, - scroll_id: params[:scroll_id]) + Activity.query( + params[:query], + uid: + params[:datacite_doi_id] || params[:provider_id] || + params[:client_id] || + params[:repository_id], + page: page, + sort: sort, + scroll_id: params[:scroll_id], + ) end begin @@ -31,27 +43,35 @@ def index total = response.total else total = response.results.total - total_for_pages = page[:cursor].nil? ? total.to_f : [total.to_f, 10000].min + total_for_pages = + page[:cursor].nil? ? total.to_f : [total.to_f, 10_000].min total_pages = page[:size] > 0 ? (total_for_pages / page[:size]).ceil : 0 end if page[:scroll].present? options = {} options[:meta] = { - total: total, - "scroll-id" => response.scroll_id, + total: total, "scroll-id" => response.scroll_id }.compact options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/activities?" + { - "scroll-id" => response.scroll_id, - "page[scroll]" => page[:scroll], - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/activities?" + + { + "scroll-id" => response.scroll_id, + "page[scroll]" => page[:scroll], + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:is_collection] = true - render json: ActivitySerializer.new(results, options).serialized_json, status: :ok + render json: ActivitySerializer.new(results, options).serialized_json, + status: :ok else results = response.results @@ -59,30 +79,49 @@ def index options[:meta] = { total: total, "totalPages" => total_pages, - page: page[:cursor].nil? && page[:number].present? ? 
page[:number] : nil, + page: + page[:cursor].nil? && page[:number].present? ? page[:number] : nil, }.compact options[:links] = { self: request.original_url, - next: response.results.size < page[:size] ? nil : request.base_url + "/activities?" + { - query: params[:query], - "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, - "page[number]" => page[:cursor].nil? && page[:number].present? ? page[:number] + 1 : nil, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if response.results.size < page[:size] + nil + else + request.base_url + "/activities?" + + { + query: params[:query], + "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, + "page[number]" => + if page[:cursor].nil? && page[:number].present? + page[:number] + 1 + end, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: ActivitySerializer.new(results, options).serialized_json, status: :ok + render json: ActivitySerializer.new(results, options).serialized_json, + status: :ok end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -91,14 +130,14 @@ def show options[:include] = @include options[:is_collection] = false - render json: ActivitySerializer.new(@activity, options).serialized_json, status: :ok + render json: ActivitySerializer.new(@activity, options).serialized_json, + status: :ok end protected - - def set_activity - response = Activity.find_by(id: params[:id]) - @activity = response.results.first - fail 
ActiveRecord::RecordNotFound if @activity.blank? - end + def set_activity + response = Activity.find_by(id: params[:id]) + @activity = response.results.first + fail ActiveRecord::RecordNotFound if @activity.blank? + end end diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb index 158d8bfbd..5d63c386d 100644 --- a/app/controllers/application_controller.rb +++ b/app/controllers/application_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ApplicationController < ActionController::API include ActionController::HttpAuthentication::Basic::ControllerMethods include Authenticable @@ -63,9 +65,7 @@ def default_format_json def authenticate_user_with_basic_auth! @user = authenticate_user! - if !@user - request_http_basic_authentication(realm = ENV["REALM"]) - end + request_http_basic_authentication(realm = ENV["REALM"]) if !@user @user end @@ -73,7 +73,9 @@ def authenticate_user_with_basic_auth! def authenticate_user! type, credentials = type_and_credentials_from_request_headers return false if credentials.blank? - raise JWT::VerificationError if (ENV["JWT_BLACKLISTED"] || "").split(",").include?(credentials) + if (ENV["JWT_BLACKLISTED"] || "").split(",").include?(credentials) + raise JWT::VerificationError + end @current_user = User.new(credentials, type: type) fail CanCan::AuthorizationNotPerformed if @current_user.errors.present? @@ -95,18 +97,31 @@ def authenticated_user end unless Rails.env.development? 
- rescue_from *RESCUABLE_EXCEPTIONS do |exception| - status = case exception.class.to_s - when "CanCan::AuthorizationNotPerformed", "JWT::DecodeError", "JWT::VerificationError" then 401 - when "CanCan::AccessDenied" then 403 - when "ActiveRecord::RecordNotFound", "AbstractController::ActionNotFound", "ActionController::RoutingError" then 404 - when "ActionController::UnknownFormat" then 406 - when "ActiveRecord::RecordNotUnique" then 409 - when "ActiveModel::ForbiddenAttributesError", "ActionController::ParameterMissing", "ActionController::UnpermittedParameters", "ActiveModelSerializers::Adapter::JsonApi::Deserialization::InvalidDocument" then 422 - when "ActionController::BadRequest" then 400 - when "SocketError" then 500 - else 400 - end + rescue_from(*RESCUABLE_EXCEPTIONS) do |exception| + status = + case exception.class.to_s + when "CanCan::AuthorizationNotPerformed", "JWT::DecodeError", + "JWT::VerificationError" + 401 + when "CanCan::AccessDenied" + 403 + when "ActiveRecord::RecordNotFound", "AbstractController::ActionNotFound", + "ActionController::RoutingError" + 404 + when "ActionController::UnknownFormat" + 406 + when "ActiveRecord::RecordNotUnique" + 409 + when "ActiveModel::ForbiddenAttributesError", "ActionController::ParameterMissing", + "ActionController::UnpermittedParameters", "ActiveModelSerializers::Adapter::JsonApi::Deserialization::InvalidDocument" + 422 + when "ActionController::BadRequest" + 400 + when "SocketError" + 500 + else + 400 + end if status == 401 message = "Bad credentials." @@ -121,7 +136,12 @@ def authenticated_user message = "The content type is not recognized." elsif status == 409 message = "The resource already exists." 
- elsif ["JSON::ParserError", "Nokogiri::XML::SyntaxError", "ActionDispatch::Http::Parameters::ParseError", "ActionController::BadRequest"].include?(exception.class.to_s) + elsif %w[ + JSON::ParserError + Nokogiri::XML::SyntaxError + ActionDispatch::Http::Parameters::ParseError + ActionController::BadRequest + ].include?(exception.class.to_s) message = exception.message else Raven.capture_exception(exception) @@ -129,7 +149,10 @@ def authenticated_user message = exception.message end - render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status + render json: { + errors: [{ status: status.to_s, title: message }], + }.to_json, + status: status end end @@ -142,29 +165,23 @@ def skip_bullet end protected - - def is_admin_or_staff? - current_user&.is_admin_or_staff? ? 1 : 0 - end + def is_admin_or_staff? + current_user&.is_admin_or_staff? ? 1 : 0 + end private + def append_info_to_payload(payload) + super + payload[:uid] = current_user.uid.downcase if current_user.try(:uid) + end - def append_info_to_payload(payload) - super - payload[:uid] = current_user.uid.downcase if current_user.try(:uid) - end - - def set_raven_context - if current_user.try(:uid) - Raven.user_context( - email: current_user.email, - id: current_user.uid, - ip_address: request.ip, - ) - else - Raven.user_context( - ip_address: request.ip, - ) + def set_raven_context + if current_user.try(:uid) + Raven.user_context( + email: current_user.email, id: current_user.uid, ip_address: request.ip, + ) + else + Raven.user_context(ip_address: request.ip) + end end - end end diff --git a/app/controllers/client_prefixes_controller.rb b/app/controllers/client_prefixes_controller.rb index a1c37c2ee..874fba3a1 100644 --- a/app/controllers/client_prefixes_controller.rb +++ b/app/controllers/client_prefixes_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "base32/url" require "uri" @@ -6,36 +8,56 @@ class ClientPrefixesController < ApplicationController 
before_action :authenticate_user! before_action :set_include load_and_authorize_resource except: %i[index show set_created set_provider] - around_action :skip_bullet, only: [:index], if: -> { defined?(Bullet) } + around_action :skip_bullet, only: %i[index], if: -> { defined?(Bullet) } def index - sort = case params[:sort] - when "name" then { "prefix.uid" => { order: "asc" } } - when "-name" then { "prefix.uid" => { order: "desc" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - else { created_at: { order: "desc" } } - end + sort = + case params[:sort] + when "name" + { "prefix.uid" => { order: "asc" } } + when "-name" + { "prefix.uid" => { order: "desc" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + else + { created_at: { order: "desc" } } + end page = page_from_params(params) - response = if params[:id].present? - ClientPrefix.find_by(id: params[:id]) - else - ClientPrefix.query(params[:query], - client_id: params[:client_id], - prefix_id: params[:prefix_id], - year: params[:year], - page: page, - sort: sort) - end + response = + if params[:id].present? + ClientPrefix.find_by(id: params[:id]) + else + ClientPrefix.query( + params[:query], + client_id: params[:client_id], + prefix_id: params[:prefix_id], + year: params[:year], + page: page, + sort: sort, + ) + end begin total = response.results.total total_pages = page[:size].positive? ? (total.to_f / page[:size]).ceil : 0 - years = total.positive? ? facet_by_year(response.response.aggregations.years.buckets) : nil - providers = total.positive? ? facet_by_combined_key(response.response.aggregations.providers.buckets) : nil - clients = total.positive? ? facet_by_combined_key(response.response.aggregations.clients.buckets) : nil + years = + if total.positive? + facet_by_year(response.response.aggregations.years.buckets) + end + providers = + if total.positive? 
+ facet_by_combined_key( + response.response.aggregations.providers.buckets, + ) + end + clients = + if total.positive? + facet_by_combined_key(response.response.aggregations.clients.buckets) + end client_prefixes = response.results @@ -51,25 +73,42 @@ def index options[:links] = { self: request.original_url, - next: client_prefixes.blank? ? nil : request.base_url + "/client-prefixes?" + { - query: params[:query], - prefix: params[:prefix], - year: params[:year], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if client_prefixes.blank? + nil + else + request.base_url + "/client-prefixes?" + + { + query: params[:query], + prefix: params[:prefix], + year: params[:year], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: ClientPrefixSerializer.new(client_prefixes, options).serialized_json, status: :ok + render json: + ClientPrefixSerializer.new(client_prefixes, options). + serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -78,7 +117,10 @@ def show options[:include] = @include options[:is_collection] = false - render json: ClientPrefixSerializer.new(@client_prefix, options).serialized_json, status: :ok + render json: + ClientPrefixSerializer.new(@client_prefix, options). + serialized_json, + status: :ok end def create @@ -86,76 +128,103 @@ def create authorize! 
:create, @client_prefix if @client_prefix.save - if @client_prefix.__elasticsearch__.index_document.dig("result") != "created" - logger.error "Error adding Client Prefix #{@client_prefix.uid} to Elasticsearch index." + if @client_prefix.__elasticsearch__.index_document.dig("result") != + "created" + logger.error "Error adding Client Prefix #{ + @client_prefix.uid + } to Elasticsearch index." end - if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@client_prefix.prefix.uid}." + if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @client_prefix.prefix.uid + }." end - if @client_prefix.provider_prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Provider Prefix #{@client_prefix.provider_prefix.uid}." + if @client_prefix.provider_prefix.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Provider Prefix #{ + @client_prefix.provider_prefix.uid + }." end options = {} options[:include] = @include options[:is_collection] = false - render json: ClientPrefixSerializer.new(@client_prefix, options).serialized_json, status: :created + render json: + ClientPrefixSerializer.new(@client_prefix, options). 
+ serialized_json, + status: :created else Rails.logger.error @client_prefix.errors.inspect - render json: serialize_errors(@client_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@client_prefix.errors), + status: :unprocessable_entity end end def update response.headers["Allow"] = "HEAD, GET, POST, DELETE, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end def destroy message = "Client prefix #{@client_prefix.uid} deleted." if @client_prefix.destroy - if @client_prefix.__elasticsearch__.delete_document.dig("result") != "deleted" - logger.error "Error deleting Client Prefix #{@client_prefix.uid} from Elasticsearch index." + if @client_prefix.__elasticsearch__.delete_document.dig("result") != + "deleted" + logger.error "Error deleting Client Prefix #{ + @client_prefix.uid + } from Elasticsearch index." end - if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@client_prefix.prefix.uid}." + if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @client_prefix.prefix.uid + }." end if @client_prefix.provider_prefix.__elasticsearch__.index_document - logger.error "Error updating Elasticsearch index for Provider Prefix #{@client_prefix.provider_prefix.uid}." + logger.error "Error updating Elasticsearch index for Provider Prefix #{ + @client_prefix.provider_prefix.uid + }." 
end logger.warn message head :no_content else Rails.logger.error @client_prefix.errors.inspect - render json: serialize_errors(@client_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@client_prefix.errors), + status: :unprocessable_entity end end protected - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[client prefix provider_prefix provider] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[client prefix provider_prefix provider] + else + @include = [] + end end - end private + def set_client_prefix + @client_prefix = ClientPrefix.where(uid: params[:id]).first + fail ActiveRecord::RecordNotFound if @client_prefix.blank? + end - def set_client_prefix - @client_prefix = ClientPrefix.where(uid: params[:id]).first - fail ActiveRecord::RecordNotFound if @client_prefix.blank? 
- end - - def safe_params - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: %i[id client prefix providerPrefix], - keys: { "providerPrefix" => :provider_prefix } - ) - end + def safe_params + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: %i[id client prefix providerPrefix], + keys: { "providerPrefix" => :provider_prefix }, + ) + end end diff --git a/app/controllers/clients_controller.rb b/app/controllers/clients_controller.rb index ded065baa..83169f6f4 100644 --- a/app/controllers/clients_controller.rb +++ b/app/controllers/clients_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ClientsController < ApplicationController include Countable @@ -7,48 +9,74 @@ class ClientsController < ApplicationController load_and_authorize_resource except: %i[index show totals stats] def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "name.raw" => { order: "asc" } } - when "-name" then { "name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "name.raw" => { order: "asc" } } + when "-name" + { "name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "name.raw" => { order: "asc" } } + end page = page_from_params(params) - response = if params[:id].present? - Client.find_by(id: params[:id]) - elsif params[:ids].present? 
- Client.find_by_id(params[:ids], page: page, sort: sort) - else - Client.query( - params[:query], - year: params[:year], - from_date: params[:from_date], - until_date: params[:until_date], - provider_id: params[:provider_id], - re3data_id: params[:re3data_id], - opendoar_id: params[:opendoar_id], - software: params[:software], - certificate: params[:certificate], - repository_type: params[:repository_type], - client_type: params[:client_type], - page: page, - sort: sort, - ) - end + response = + if params[:id].present? + Client.find_by(id: params[:id]) + elsif params[:ids].present? + Client.find_by_id(params[:ids], page: page, sort: sort) + else + Client.query( + params[:query], + year: params[:year], + from_date: params[:from_date], + until_date: params[:until_date], + provider_id: params[:provider_id], + re3data_id: params[:re3data_id], + opendoar_id: params[:opendoar_id], + software: params[:software], + certificate: params[:certificate], + repository_type: params[:repository_type], + client_type: params[:client_type], + page: page, + sort: sort, + ) + end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_key_as_string(response.aggregations.years.buckets) : nil - providers = total > 0 ? facet_by_combined_key(response.aggregations.providers.buckets) : nil - software = total > 0 ? facet_by_software(response.aggregations.software.buckets) : nil - client_types = total > 0 ? facet_by_key(response.aggregations.client_types.buckets) : nil - certificates = total > 0 ? facet_by_key(response.aggregations.certificates.buckets) : nil - repository_types = total > 0 ? 
facet_by_key(response.aggregations.repository_types.buckets) : nil + years = + if total > 0 + facet_by_key_as_string(response.aggregations.years.buckets) + end + providers = + if total > 0 + facet_by_combined_key(response.aggregations.providers.buckets) + end + software = + if total > 0 + facet_by_software(response.aggregations.software.buckets) + end + client_types = + if total > 0 + facet_by_key(response.aggregations.client_types.buckets) + end + certificates = + if total > 0 + facet_by_key(response.aggregations.certificates.buckets) + end + repository_types = + if total > 0 + facet_by_key(response.aggregations.repository_types.buckets) + end @clients = response.results @@ -67,18 +95,25 @@ def index options[:links] = { self: request.original_url, - next: @clients.blank? ? nil : request.base_url + "/clients?" + { - query: params[:query], - "provider-id" => params[:provider_id], - software: params[:software], - certificate: params[:certificate], - "repositoryType" => params[:repository_type], - "clientTypes" => params[:client_type], - year: params[:year], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if @clients.blank? + nil + else + request.base_url + "/clients?" + + { + query: params[:query], + "provider-id" => params[:provider_id], + software: params[:software], + certificate: params[:certificate], + "repositoryType" => params[:repository_type], + "clientTypes" => params[:client_type], + year: params[:year], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true @@ -86,16 +121,27 @@ def index fields = fields_from_params(params) if fields - render json: ClientSerializer.new(@clients, options.merge(fields: fields)).serialized_json, status: :ok + render json: + ClientSerializer.new(@clients, options.merge(fields: fields)). 
+ serialized_json, + status: :ok else - render json: ClientSerializer.new(@clients, options).serialized_json, status: :ok + render json: ClientSerializer.new(@clients, options).serialized_json, + status: :ok end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -105,7 +151,8 @@ def show options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: ClientSerializer.new(@client, options).serialized_json, status: :ok + render json: ClientSerializer.new(@client, options).serialized_json, + status: :ok end def create @@ -118,10 +165,12 @@ def create options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: ClientSerializer.new(@client, options).serialized_json, status: :created + render json: ClientSerializer.new(@client, options).serialized_json, + status: :created else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end @@ -135,13 +184,15 @@ def update authorize! 
:transfer, @client @client.transfer(provider_target_id: safe_params[:target_id]) - render json: ClientSerializer.new(@client, options).serialized_json, status: :ok + render json: ClientSerializer.new(@client, options).serialized_json, + status: :ok elsif @client.update(safe_params) - - render json: ClientSerializer.new(@client, options).serialized_json, status: :ok + render json: ClientSerializer.new(@client, options).serialized_json, + status: :ok else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end @@ -152,61 +203,133 @@ def destroy message = "Can't delete client that has DOIs." status = 400 Rails.logger.warn message - render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status + render json: { + errors: [{ status: status.to_s, title: message }], + }.to_json, + status: status elsif @client.update(is_active: nil, deleted_at: Time.zone.now) - @client.send_delete_email(responsible_id: current_user.uid) unless Rails.env.test? + unless Rails.env.test? + @client.send_delete_email(responsible_id: current_user.uid) + end head :no_content else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end def totals page = { size: 0, number: 1 } - state = current_user.present? && current_user.is_admin_or_staff? && params[:state].present? ? params[:state] : "registered,findable" - response = DataciteDoi.query(nil, provider_id: params[:provider_id], state: state, page: page, totals_agg: "client") - registrant = response.results.total.positive? ? clients_totals(response.aggregations.clients_totals.buckets) : [] + state = + if current_user.present? && current_user.is_admin_or_staff? && + params[:state].present? 
+            params[:state]
+          else
+            "registered,findable"
+          end
+    response =
+      DataciteDoi.query(
+        nil,
+        provider_id: params[:provider_id],
+        state: state,
+        page: page,
+        totals_agg: "client",
+      )
+    registrant =
+      if response.results.total.positive?
+        clients_totals(response.aggregations.clients_totals.buckets)
+      else
+        []
+      end
 
     render json: registrant, status: :ok
   end
 
   def stats
     meta = {
       dois: doi_count(client_id: params[:id]),
       "resourceTypes" => resource_type_count(client_id: params[:id]),
       # citations: citation_count(client_id: params[:id]),
       # views: view_count(client_id: params[:id]),
       # downloads: download_count(client_id: params[:id]),
     }.compact
 
     render json: meta, status: :ok
   end
 
   protected
-
-  def set_include
-    if params[:include].present?
-      @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym }
-      @include = @include & %i[provider repository]
-    else
-      @include = []
+    def set_include
+      if params[:include].present?
+        @include =
+          params[:include].split(",").map { |i| i.downcase.underscore.to_sym }
+        @include = @include & %i[provider repository]
+      else
+        @include = []
+      end
     end
-  end
 
-  def set_client
-    @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first
-    fail ActiveRecord::RecordNotFound if @client.blank?
-  end
+    def set_client
+      @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first
+      fail ActiveRecord::RecordNotFound if @client.blank?
+    end
 
   private
+    def safe_params
+      if params[:data].blank?
+        fail JSON::ParserError,
+             "You need to provide a payload following the JSONAPI spec"
+      end
 
-  def safe_params
-    fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank?
- - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:symbol, :name, "systemEmail", "contactEmail", "globusUuid", :domains, :provider, :url, "repositoryType", { "repositoryType" => [] }, :description, :language, { language: [] }, "alternateName", :software, "targetId", "isActive", "passwordInput", "clientType", :re3data, :opendoar, :issn, { issn: %i[issnl electronic print] }, :certificate, { certificate: [] }, "serviceContact", { "serviceContact": [:email, "givenName", "familyName"] }, "salesforceId"], - keys: { "systemEmail" => :system_email, "contactEmail" => :system_email, "globusUuid" => :globus_uuid, "salesforceId" => :salesforce_id, "targetId" => :target_id, "isActive" => :is_active, "passwordInput" => :password_input, "clientType" => :client_type, "alternateName" => :alternate_name, "repositoryType" => :repository_type, "serviceContact" => :service_contact } - ) - end + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [ + :symbol, + :name, + "systemEmail", + "contactEmail", + "globusUuid", + :domains, + :provider, + :url, + "repositoryType", + { "repositoryType" => [] }, + :description, + :language, + { language: [] }, + "alternateName", + :software, + "targetId", + "isActive", + "passwordInput", + "clientType", + :re3data, + :opendoar, + :issn, + { issn: %i[issnl electronic print] }, + :certificate, + { certificate: [] }, + "serviceContact", + { "serviceContact": [:email, "givenName", "familyName"] }, + "salesforceId", + ], + keys: { + "systemEmail" => :system_email, + "contactEmail" => :system_email, + "globusUuid" => :globus_uuid, + "salesforceId" => :salesforce_id, + "targetId" => :target_id, + "isActive" => :is_active, + "passwordInput" => :password_input, + "clientType" => :client_type, + "alternateName" => :alternate_name, + "repositoryType" => :repository_type, + "serviceContact" => :service_contact, + }, + ) + end end diff --git a/app/controllers/concerns/countable.rb 
b/app/controllers/concerns/countable.rb index d6018e327..4df1f944b 100644 --- a/app/controllers/concerns/countable.rb +++ b/app/controllers/concerns/countable.rb @@ -1,69 +1,156 @@ +# frozen_string_literal: true + module Countable extend ActiveSupport::Concern included do - def doi_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil) - response = if client_id - DataciteDoi.stats_query(client_id: client_id) - elsif provider_id - DataciteDoi.stats_query(provider_id: provider_id) - elsif consortium_id - DataciteDoi.stats_query(consortium_id: consortium_id) - elsif user_id - DataciteDoi.stats_query(user_id: user_id) - else - DataciteDoi.stats_query - end - - response.results.total.positive? ? facet_by_year(response.aggregations.created.buckets) : [] + def doi_count( + client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil + ) + response = + if client_id + DataciteDoi.stats_query(client_id: client_id) + elsif provider_id + DataciteDoi.stats_query(provider_id: provider_id) + elsif consortium_id + DataciteDoi.stats_query(consortium_id: consortium_id) + elsif user_id + DataciteDoi.stats_query(user_id: user_id) + else + DataciteDoi.stats_query + end + + if response.results.total.positive? 
+ facet_by_year(response.aggregations.created.buckets) + else + [] + end end - def view_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil, state: nil) + def view_count( + client_id: nil, + provider_id: nil, + consortium_id: nil, + user_id: nil, + state: nil + ) if client_id - response = DataciteDoi.query(nil, client_id: client_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + client_id: client_id, page: { number: 1, size: 0 }, + ) elsif provider_id - response = DataciteDoi.query(nil, provider_id: provider_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + provider_id: provider_id, page: { number: 1, size: 0 }, + ) elsif consortium_id - response = DataciteDoi.query(nil, consortium_id: consortium_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + consortium_id: consortium_id, page: { number: 1, size: 0 }, + ) elsif user_id - response = DataciteDoi.query(nil, user_id: user_id, state: state, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + user_id: user_id, state: state, page: { number: 1, size: 0 }, + ) else response = DataciteDoi.query(nil, page: { number: 1, size: 0 }) end - response.results.total.positive? ? metric_facet_by_year(response.aggregations.views.buckets) : [] + if response.results.total.positive? 
+ metric_facet_by_year(response.aggregations.views.buckets) + else + [] + end end - def download_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil, state: nil) + def download_count( + client_id: nil, + provider_id: nil, + consortium_id: nil, + user_id: nil, + state: nil + ) if client_id - response = DataciteDoi.query(nil, client_id: client_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + client_id: client_id, page: { number: 1, size: 0 }, + ) elsif provider_id - response = DataciteDoi.query(nil, provider_id: provider_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + provider_id: provider_id, page: { number: 1, size: 0 }, + ) elsif consortium_id - response = DataciteDoi.query(nil, consortium_id: consortium_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + consortium_id: consortium_id, page: { number: 1, size: 0 }, + ) elsif user_id - response = DataciteDoi.query(nil, user_id: user_id, state: state, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + user_id: user_id, state: state, page: { number: 1, size: 0 }, + ) else response = DataciteDoi.query(nil, page: { number: 1, size: 0 }) end - response.results.total.positive? ? metric_facet_by_year(response.aggregations.downloads.buckets) : [] + if response.results.total.positive? 
+ metric_facet_by_year(response.aggregations.downloads.buckets) + else + [] + end end - def citation_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil, state: nil) + def citation_count( + client_id: nil, + provider_id: nil, + consortium_id: nil, + user_id: nil, + state: nil + ) if client_id - response = DataciteDoi.query(nil, client_id: client_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + client_id: client_id, page: { number: 1, size: 0 }, + ) elsif provider_id - response = DataciteDoi.query(nil, provider_id: provider_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + provider_id: provider_id, page: { number: 1, size: 0 }, + ) elsif consortium_id - response = DataciteDoi.query(nil, consortium_id: consortium_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + consortium_id: consortium_id, page: { number: 1, size: 0 }, + ) elsif user_id - response = DataciteDoi.query(nil, user_id: user_id, state: state, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + user_id: user_id, state: state, page: { number: 1, size: 0 }, + ) else response = DataciteDoi.query(nil, page: { number: 1, size: 0 }) end - response.results.total.positive? ? metric_facet_by_year(response.aggregations.citations.buckets) : [] + if response.results.total.positive? 
+ metric_facet_by_year(response.aggregations.citations.buckets) + else + [] + end end # cumulative count clients by year @@ -71,21 +158,42 @@ def citation_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id # show all clients for admin def client_count(provider_id: nil, consortium_id: nil) if provider_id - response = Client.query(nil, provider_id: provider_id, include_deleted: true, page: { number: 1, size: 0 }) + response = + Client.query( + nil, + provider_id: provider_id, + include_deleted: true, + page: { number: 1, size: 0 }, + ) elsif consortium_id - response = Client.query(nil, consortium_id: consortium_id, include_deleted: true, page: { number: 1, size: 0 }) + response = + Client.query( + nil, + consortium_id: consortium_id, + include_deleted: true, + page: { number: 1, size: 0 }, + ) else - response = Client.query(nil, include_deleted: true, page: { number: 1, size: 0 }) + response = + Client.query(nil, include_deleted: true, page: { number: 1, size: 0 }) end - response.results.total.positive? ? facet_by_cumulative_year(response.aggregations.cumulative_years.buckets) : [] + if response.results.total.positive? + facet_by_cumulative_year(response.aggregations.cumulative_years.buckets) + else + [] + end end # count active clients by provider. Provider can only be deleted when there are no active clients. def active_client_count(provider_id: nil) return 0 if provider_id.blank? - response = Client.query(nil, provider_id: provider_id, page: { number: 1, size: 0 }) + response = + Client.query( + nil, + provider_id: provider_id, page: { number: 1, size: 0 }, + ) response.results.total end @@ -93,28 +201,76 @@ def active_client_count(provider_id: nil) # count until the previous year if provider has been deleted def provider_count(consortium_id: nil) if consortium_id - response = Provider.query(nil, consortium_id: consortium_id, include_deleted: true, page: { number: 1, size: 0 }) - response.results.total.positive? ? 
facet_by_cumulative_year(response.aggregations.cumulative_years.buckets) : [] + response = + Provider.query( + nil, + consortium_id: consortium_id, + include_deleted: true, + page: { number: 1, size: 0 }, + ) + if response.results.total.positive? + facet_by_cumulative_year( + response.aggregations.cumulative_years.buckets, + ) + else + [] + end else - response = Provider.query(nil, include_deleted: true, page: { number: 1, size: 0 }) - response.results.total.positive? ? facet_by_cumulative_year(response.aggregations.cumulative_years.buckets) : [] + response = + Provider.query( + nil, + include_deleted: true, page: { number: 1, size: 0 }, + ) + if response.results.total.positive? + facet_by_cumulative_year( + response.aggregations.cumulative_years.buckets, + ) + else + [] + end end end - def resource_type_count(client_id: nil, provider_id: nil, consortium_id: nil, user_id: nil, state: nil) + def resource_type_count( + client_id: nil, + provider_id: nil, + consortium_id: nil, + user_id: nil, + state: nil + ) if client_id - response = DataciteDoi.query(nil, client_id: client_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + client_id: client_id, page: { number: 1, size: 0 }, + ) elsif provider_id - response = DataciteDoi.query(nil, provider_id: provider_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + provider_id: provider_id, page: { number: 1, size: 0 }, + ) elsif consortium_id - response = DataciteDoi.query(nil, consortium_id: consortium_id, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + consortium_id: consortium_id, page: { number: 1, size: 0 }, + ) elsif user_id - response = DataciteDoi.query(nil, user_id: user_id, state: state, page: { number: 1, size: 0 }) + response = + DataciteDoi.query( + nil, + user_id: user_id, state: state, page: { number: 1, size: 0 }, + ) else response = DataciteDoi.query(nil, page: { number: 1, size: 0 }) end - response.results.total.positive? ? 
facet_by_combined_key(response.aggregations.resource_types.buckets) : [] + if response.results.total.positive? + facet_by_combined_key(response.aggregations.resource_types.buckets) + else + [] + end end end end diff --git a/app/controllers/concerns/error_serializable.rb b/app/controllers/concerns/error_serializable.rb index 52748a762..01b2a213d 100644 --- a/app/controllers/concerns/error_serializable.rb +++ b/app/controllers/concerns/error_serializable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module ErrorSerializable extend ActiveSupport::Concern @@ -5,15 +7,21 @@ module ErrorSerializable def serialize_errors(errors) return nil if errors.nil? - arr = Array.wrap(errors).reduce([]) do |sum, err| - source = err.keys.first + arr = + Array.wrap(errors).reduce([]) do |sum, err| + source = err.keys.first - Array.wrap(err.values.first).each do |title| - sum << { source: source, title: title.is_a?(String) ? title.sub(/^./, &:upcase) : title.to_s } - end + Array.wrap(err.values.first).each do |title| + sum << + { + source: source, + title: + title.is_a?(String) ? 
title.sub(/^./, &:upcase) : title.to_s, + } + end - sum - end + sum + end { errors: arr }.to_json end diff --git a/app/controllers/concerns/facetable.rb b/app/controllers/concerns/facetable.rb index 000668b7d..55effa52a 100644 --- a/app/controllers/concerns/facetable.rb +++ b/app/controllers/concerns/facetable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Facetable extend ActiveSupport::Concern @@ -57,22 +59,26 @@ module Facetable "ogl-canada-2.0" => "OGL-Canada-2.0", }.freeze - LOWER_BOUND_YEAR = 2010 + LOWER_BOUND_YEAR = 2_010 included do def facet_by_key_as_string(arr) arr.map do |hsh| - { "id" => hsh["key_as_string"], + { + "id" => hsh["key_as_string"], "title" => hsh["key_as_string"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_year(arr) arr.map do |hsh| - { "id" => hsh["key_as_string"][0..3], + { + "id" => hsh["key_as_string"][0..3], "title" => hsh["key_as_string"][0..3], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end @@ -80,21 +86,27 @@ def facet_by_year(arr) def facet_by_range(arr) interval = Date.current.year - LOWER_BOUND_YEAR - arr.select { |a| a["key_as_string"].to_i <= Date.current.year }[0..interval].map do |hsh| - { "id" => hsh["key_as_string"], + arr.select { |a| a["key_as_string"].to_i <= Date.current.year }[ + 0..interval + ]. 
+ map do |hsh| + { + "id" => hsh["key_as_string"], "title" => hsh["key_as_string"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def metric_facet_by_year(arr) arr.reduce([]) do |sum, hsh| if hsh.dig("metric_count", "value").to_i > 0 - sum << { - "id" => hsh["key_as_string"][0..3], - "title" => hsh["key_as_string"][0..3], - "count" => hsh.dig("metric_count", "value").to_i, - } + sum << + { + "id" => hsh["key_as_string"][0..3], + "title" => hsh["key_as_string"][0..3], + "count" => hsh.dig("metric_count", "value").to_i, + } end sum @@ -103,49 +115,61 @@ def metric_facet_by_year(arr) def facet_annual(arr) arr.map do |hsh| - { "id" => hsh["key"][0..3], + { + "id" => hsh["key"][0..3], "title" => hsh["key"][0..3], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_date(arr) arr.map do |hsh| - { "id" => hsh["key"][0..9], + { + "id" => hsh["key"][0..9], "title" => hsh["key"][0..9], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_cumulative_year(arr) arr.map do |hsh| - { "id" => hsh["key"].to_s, + { + "id" => hsh["key"].to_s, "title" => hsh["key"].to_s, - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_key(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => hsh["key"].titleize, - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_software(arr) arr.map do |hsh| - { "id" => hsh["key"].parameterize(separator: "_"), + { + "id" => hsh["key"].parameterize(separator: "_"), "title" => hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_license(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => LICENSES[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end @@ -153,136 +177,164 @@ def facet_by_schema(arr) arr.map do |hsh| id = 
hsh["key"].split("-").last - { "id" => id, - "title" => "Schema #{id}", - "count" => hsh["doc_count"] } + { "id" => id, "title" => "Schema #{id}", "count" => hsh["doc_count"] } end end def facet_by_region(arr) arr.map do |hsh| - { "id" => hsh["key"].downcase, + { + "id" => hsh["key"].downcase, "title" => REGIONS[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_resource_type(arr) arr.map do |hsh| - { "id" => hsh["key"].underscore.dasherize, + { + "id" => hsh["key"].underscore.dasherize, "title" => hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_source(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => SOURCES[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_relation_type(arr) arr.map do |hsh| - year_month_arr = hsh.dig("year_month", "buckets").map do |h| - { - "id" => h["key_as_string"], - "title" => h["key_as_string"], - "sum" => h["doc_count"], - } - end - - { "id" => hsh["key"], + year_month_arr = + hsh.dig("year_month", "buckets").map do |h| + { + "id" => h["key_as_string"], + "title" => h["key_as_string"], + "sum" => h["doc_count"], + } + end + + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], - "yearMonths" => year_month_arr } + "yearMonths" => year_month_arr, + } end end def facet_by_relation_type_v1(arr) arr.map do |hsh| - year_month_arr = hsh.dig("year_month", "buckets").map do |h| - { - "id" => h["key_as_string"], - "title" => h["key_as_string"], - "sum" => h["doc_count"], - } - end - - { "id" => hsh["key"], + year_month_arr = + hsh.dig("year_month", "buckets").map do |h| + { + "id" => h["key_as_string"], + "title" => h["key_as_string"], + "sum" => h["doc_count"], + } + end + + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], - "year-months" => year_month_arr } + "year-months" => 
year_month_arr, + } end end def facet_by_citation_type(arr) arr.map do |hsh| - year_month_arr = hsh.dig("year_month", "buckets").map do |h| - { - "id" => h["key_as_string"], - "title" => h["key_as_string"], - "sum" => h["doc_count"], - } - end - - { "id" => hsh["key"], + year_month_arr = + hsh.dig("year_month", "buckets").map do |h| + { + "id" => h["key_as_string"], + "title" => h["key_as_string"], + "sum" => h["doc_count"], + } + end + + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], - "yearMonths" => year_month_arr } + "yearMonths" => year_month_arr, + } end end def facet_by_citation_type_v1(arr) arr.map do |hsh| - year_month_arr = hsh.dig("year_month", "buckets").map do |h| - { - "id" => h["key_as_string"], - "title" => h["key_as_string"], - "sum" => h["doc_count"], - } - end - - { "id" => hsh["key"], + year_month_arr = + hsh.dig("year_month", "buckets").map do |h| + { + "id" => h["key_as_string"], + "title" => h["key_as_string"], + "sum" => h["doc_count"], + } + end + + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], - "year-months" => year_month_arr } + "year-months" => year_month_arr, + } end end def facet_by_registrants(arr) arr.map do |hsh| - year_arr = hsh.dig("year", "buckets").map do |h| - { - "id" => h["key_as_string"], - "title" => h["key_as_string"], - "sum" => h["doc_count"], - } - end - - { "id" => hsh["key"], + year_arr = + hsh.dig("year", "buckets").map do |h| + { + "id" => h["key_as_string"], + "title" => h["key_as_string"], + "sum" => h["doc_count"], + } + end + + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], - "years" => year_arr } + "years" => year_arr, + } end end def providers_totals(arr) - providers = Provider.unscoped.where("allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR')").where(deleted_at: nil).pluck(:symbol, :name).to_h + providers = + 
Provider.unscoped.where( + "allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR')", + ). + where(deleted_at: nil). + pluck(:symbol, :name). + to_h arr.reduce([]) do |sum, hsh| if providers[hsh["key"].upcase] - sum << { "id" => hsh["key"], - "title" => providers[hsh["key"].upcase], - "count" => hsh["doc_count"], - "temporal" => { - "this_month" => facet_annual(hsh.this_month.buckets), - "this_year" => facet_annual(hsh.this_year.buckets), - "last_year" => facet_annual(hsh.last_year.buckets), - "two_years_ago" => facet_annual(hsh.two_years_ago.buckets), - }, - "states" => facet_by_key(hsh.states.buckets) } + sum << + { + "id" => hsh["key"], + "title" => providers[hsh["key"].upcase], + "count" => hsh["doc_count"], + "temporal" => { + "this_month" => facet_annual(hsh.this_month.buckets), + "this_year" => facet_annual(hsh.this_year.buckets), + "last_year" => facet_annual(hsh.last_year.buckets), + "two_years_ago" => facet_annual(hsh.two_years_ago.buckets), + }, + "states" => facet_by_key(hsh.states.buckets), + } end sum @@ -291,7 +343,8 @@ def providers_totals(arr) def prefixes_totals(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => hsh["key"], "count" => hsh["doc_count"], "temporal" => { @@ -299,43 +352,47 @@ def prefixes_totals(arr) "this_year" => facet_annual(hsh.this_year.buckets), "last_year" => facet_annual(hsh.last_year.buckets), }, - "states" => facet_by_key(hsh.states.buckets) } + "states" => facet_by_key(hsh.states.buckets), + } end end def clients_totals(arr) clients = Client.all.pluck(:symbol, :name).to_h - arr = arr.map do |hsh| - { "id" => hsh["key"], - "title" => clients[hsh["key"].upcase], - "count" => hsh["doc_count"], - "temporal" => { - "this_month" => facet_annual(hsh.this_month.buckets), - "this_year" => facet_annual(hsh.this_year.buckets), - "last_year" => facet_annual(hsh.last_year.buckets), - "two_years_ago" => 
facet_annual(hsh.two_years_ago.buckets), - }, - "states" => facet_by_key(hsh.states.buckets) } - end + arr = + arr.map do |hsh| + { + "id" => hsh["key"], + "title" => clients[hsh["key"].upcase], + "count" => hsh["doc_count"], + "temporal" => { + "this_month" => facet_annual(hsh.this_month.buckets), + "this_year" => facet_annual(hsh.this_year.buckets), + "last_year" => facet_annual(hsh.last_year.buckets), + "two_years_ago" => facet_annual(hsh.two_years_ago.buckets), + }, + "states" => facet_by_key(hsh.states.buckets), + } + end end def facet_by_combined_key(arr) arr.map do |hsh| id, title = hsh["key"].split(":", 2) - { "id" => id, - "title" => title, - "count" => hsh["doc_count"] } + { "id" => id, "title" => title, "count" => hsh["doc_count"] } end end def facet_by_fos(arr) arr.map do |hsh| title = hsh["key"].gsub("FOS: ", "") - { "id" => title.parameterize(separator: "_"), + { + "id" => title.parameterize(separator: "_"), "title" => title, - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end end diff --git a/app/controllers/concerns/fieldable.rb b/app/controllers/concerns/fieldable.rb index 9db7fd604..5e1cee97d 100644 --- a/app/controllers/concerns/fieldable.rb +++ b/app/controllers/concerns/fieldable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Fieldable extend ActiveSupport::Concern diff --git a/app/controllers/concerns/paginatable.rb b/app/controllers/concerns/paginatable.rb index de77cf6c1..7961d3060 100644 --- a/app/controllers/concerns/paginatable.rb +++ b/app/controllers/concerns/paginatable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Paginatable extend ActiveSupport::Concern @@ -6,11 +8,7 @@ module Paginatable def page_from_params(params) p = params.to_unsafe_h.dig(:page) - page = if p.is_a?(Hash) - p.symbolize_keys - else - {} - end + page = p.is_a?(Hash) ? p.symbolize_keys : {} # Use scroll API for large number of results, e.g. 
to generate sitemaps # Alternatively use cursor @@ -21,7 +19,8 @@ def page_from_params(params) # When we decode and split, we'll always end up with an array # use urlsafe_decode to not worry about url-unsafe characters + and / # split into two strings so that DOIs with comma in them are left intact - page[:cursor] = Base64.urlsafe_decode64(page[:cursor].to_s).split(",", 2) + page[:cursor] = + Base64.urlsafe_decode64(page[:cursor].to_s).split(",", 2) rescue ArgumentError # If we fail to decode we'll just default back to an empty cursor page[:cursor] = [] @@ -31,20 +30,24 @@ def page_from_params(params) # Elasticsearch is limited to 10000 results per query, so we liit with max_number # max number of results per page is 1000 if page[:size].present? - page[:size] = [page[:size].to_i, 1000].min - max_number = page[:size] > 0 ? 10000 / page[:size] : 1 + page[:size] = [page[:size].to_i, 1_000].min + max_number = page[:size] > 0 ? 10_000 / page[:size] : 1 else page[:size] = 25 - max_number = 10000 / page[:size] + max_number = 10_000 / page[:size] end - page[:number] = page[:number].to_i > 0 ? [page[:number].to_i, max_number].min : 1 + page[:number] = + page[:number].to_i > 0 ? 
[page[:number].to_i, max_number].min : 1 page end def make_cursor(results) # Base64-encode cursor - Base64.urlsafe_encode64(results.to_a.last[:sort].join(","), padding: false) + Base64.urlsafe_encode64( + results.to_a.last[:sort].join(","), + padding: false, + ) end end end diff --git a/app/controllers/data_centers_controller.rb b/app/controllers/data_centers_controller.rb index 888be323f..f44c10d77 100644 --- a/app/controllers/data_centers_controller.rb +++ b/app/controllers/data_centers_controller.rb @@ -1,37 +1,57 @@ +# frozen_string_literal: true + class DataCentersController < ApplicationController - before_action :set_client, only: [:show] + before_action :set_client, only: %i[show] before_action :set_include def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "name.raw" => { order: "asc" } } - when "-name" then { "name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "name.raw" => { order: "asc" } } + when "-name" + { "name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "name.raw" => { order: "asc" } } + end page = page_from_params(params) - response = if params[:id].present? - Client.find_by(id: params[:id]) - elsif params[:ids].present? - Client.find_by_id(params[:ids], page: page, sort: sort) - else - Client.query(params[:query], - year: params[:year], - provider_id: params[:member_id], - fields: params[:fields], - page: page, - sort: sort) - end + response = + if params[:id].present? + Client.find_by(id: params[:id]) + elsif params[:ids].present? 
+ Client.find_by_id(params[:ids], page: page, sort: sort) + else + Client.query( + params[:query], + year: params[:year], + provider_id: params[:member_id], + fields: params[:fields], + page: page, + sort: sort, + ) + end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_year(response.response.aggregations.years.buckets) : nil - providers = total > 0 ? facet_by_combined_key(response.response.aggregations.providers.buckets) : nil + years = + if total > 0 + facet_by_year(response.response.aggregations.years.buckets) + end + providers = + if total > 0 + facet_by_combined_key( + response.response.aggregations.providers.buckets, + ) + end @clients = response.results @@ -46,27 +66,42 @@ def index options[:links] = { self: request.original_url, - next: @clients.blank? ? nil : request.base_url + "/data-centers?" + { - query: params[:query], - "member-id" => params[:member_id], - year: params[:year], - fields: params[:fields], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if @clients.blank? + nil + else + request.base_url + "/data-centers?" + + { + query: params[:query], + "member-id" => params[:member_id], + year: params[:year], + fields: params[:fields], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. 
+ to_query + end, }.compact options[:include] = @include options[:is_collection] = true options[:links] = nil - render json: DataCenterSerializer.new(@clients, options).serialized_json, status: :ok + render json: DataCenterSerializer.new(@clients, options).serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -75,28 +110,27 @@ def show options[:include] = @include options[:is_collection] = false - render json: DataCenterSerializer.new(@client, options).serialized_json, status: :ok + render json: DataCenterSerializer.new(@client, options).serialized_json, + status: :ok end protected - - def set_include - if params[:include].present? - include_keys = { - "member" => :provider, - } - @include = params[:include].split(",").reduce([]) do |sum, i| - k = include_keys[i.downcase.underscore] - sum << k if k.present? - sum + def set_include + if params[:include].present? + include_keys = { "member" => :provider } + @include = + params[:include].split(",").reduce([]) do |sum, i| + k = include_keys[i.downcase.underscore] + sum << k if k.present? + sum + end + else + @include = [] end - else - @include = [] end - end - def set_client - @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first - fail ActiveRecord::RecordNotFound if @client.blank? - end + def set_client + @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first + fail ActiveRecord::RecordNotFound if @client.blank? 
+ end end diff --git a/app/controllers/datacite_dois_controller.rb b/app/controllers/datacite_dois_controller.rb index f67155a01..7807dd4e5 100644 --- a/app/controllers/datacite_dois_controller.rb +++ b/app/controllers/datacite_dois_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "uri" require "base64" require "pp" @@ -11,90 +13,119 @@ class DataciteDoisController < ApplicationController before_action :set_raven_context, only: %i[create update validate] def index - sort = case params[:sort] - when "name" then { "doi" => { order: "asc" } } - when "-name" then { "doi" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - when "updated" then { updated: { order: "asc" } } - when "-updated" then { updated: { order: "desc" } } - when "published" then { published: { order: "asc" } } - when "-published" then { published: { order: "desc" } } - when "view-count" then { view_count: { order: "asc" } } - when "-view-count" then { view_count: { order: "desc" } } - when "download-count" then { download_count: { order: "asc" } } - when "-download-count" then { download_count: { order: "desc" } } - when "citation-count" then { citation_count: { order: "asc" } } - when "-citation-count" then { citation_count: { order: "desc" } } - when "relevance" then { "_score": { "order": "desc" } } - else { updated: { order: "desc" } } - end + sort = + case params[:sort] + when "name" + { "doi" => { order: "asc" } } + when "-name" + { "doi" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + when "updated" + { updated: { order: "asc" } } + when "-updated" + { updated: { order: "desc" } } + when "published" + { published: { order: "asc" } } + when "-published" + { published: { order: "desc" } } + when "view-count" + { view_count: { order: "asc" } } + when "-view-count" + { view_count: { order: "desc" } } + when 
"download-count" + { download_count: { order: "asc" } } + when "-download-count" + { download_count: { order: "desc" } } + when "citation-count" + { citation_count: { order: "asc" } } + when "-citation-count" + { citation_count: { order: "desc" } } + when "relevance" + { "_score": { "order": "desc" } } + else + { updated: { order: "desc" } } + end page = page_from_params(params) - sample_group_field = case params[:sample_group] - when "client" then "client_id" - when "data-center" then "client_id" - when "provider" then "provider_id" - when "resource-type" then "types.resourceTypeGeneral" - end + sample_group_field = + case params[:sample_group] + when "client" + "client_id" + when "data-center" + "client_id" + when "provider" + "provider_id" + when "resource-type" + "types.resourceTypeGeneral" + end # only show findable DOIs to anonymous users and role user - params[:state] = "findable" if current_user.nil? || current_user.role_id == "user" + if current_user.nil? || current_user.role_id == "user" + params[:state] = "findable" + end if params[:id].present? response = DataciteDoi.find_by(id: params[:id]) elsif params[:ids].present? 
response = DataciteDoi.find_by_ids(params[:ids], page: page, sort: sort) else - response = DataciteDoi.query(params[:query], - state: params[:state], - exclude_registration_agencies: true, - published: params[:published], - created: params[:created], - registered: params[:registered], - provider_id: params[:provider_id], - consortium_id: params[:consortium_id], - client_id: params[:client_id], - affiliation_id: params[:affiliation_id], - funder_id: params[:funder_id], - re3data_id: params[:re3data_id], - opendoar_id: params[:opendoar_id], - license: params[:license], - certificate: params[:certificate], - prefix: params[:prefix], - user_id: params[:user_id], - resource_type_id: params[:resource_type_id], - resource_type: params[:resource_type], - schema_version: params[:schema_version], - subject: params[:subject], - field_of_science: params[:field_of_science], - has_citations: params[:has_citations], - has_references: params[:has_references], - has_parts: params[:has_parts], - has_part_of: params[:has_part_of], - has_versions: params[:has_versions], - has_version_of: params[:has_version_of], - has_views: params[:has_views], - has_downloads: params[:has_downloads], - has_person: params[:has_person], - has_affiliation: params[:has_affiliation], - has_organization: params[:has_organization], - has_funder: params[:has_funder], - link_check_status: params[:link_check_status], - link_check_has_schema_org: params[:link_check_has_schema_org], - link_check_body_has_pid: params[:link_check_body_has_pid], - link_check_found_schema_org_id: params[:link_check_found_schema_org_id], - link_check_found_dc_identifier: params[:link_check_found_dc_identifier], - link_check_found_citation_doi: params[:link_check_found_citation_doi], - link_check_redirect_count_gte: params[:link_check_redirect_count_gte], - sample_group: sample_group_field, - sample_size: params[:sample], - source: params[:source], - scroll_id: params[:scroll_id], - page: page, - sort: sort, - random: params[:random]) 
+ response = + DataciteDoi.query( + params[:query], + state: params[:state], + exclude_registration_agencies: true, + published: params[:published], + created: params[:created], + registered: params[:registered], + provider_id: params[:provider_id], + consortium_id: params[:consortium_id], + client_id: params[:client_id], + affiliation_id: params[:affiliation_id], + funder_id: params[:funder_id], + re3data_id: params[:re3data_id], + opendoar_id: params[:opendoar_id], + license: params[:license], + certificate: params[:certificate], + prefix: params[:prefix], + user_id: params[:user_id], + resource_type_id: params[:resource_type_id], + resource_type: params[:resource_type], + schema_version: params[:schema_version], + subject: params[:subject], + field_of_science: params[:field_of_science], + has_citations: params[:has_citations], + has_references: params[:has_references], + has_parts: params[:has_parts], + has_part_of: params[:has_part_of], + has_versions: params[:has_versions], + has_version_of: params[:has_version_of], + has_views: params[:has_views], + has_downloads: params[:has_downloads], + has_person: params[:has_person], + has_affiliation: params[:has_affiliation], + has_organization: params[:has_organization], + has_funder: params[:has_funder], + link_check_status: params[:link_check_status], + link_check_has_schema_org: params[:link_check_has_schema_org], + link_check_body_has_pid: params[:link_check_body_has_pid], + link_check_found_schema_org_id: + params[:link_check_found_schema_org_id], + link_check_found_dc_identifier: + params[:link_check_found_dc_identifier], + link_check_found_citation_doi: params[:link_check_found_citation_doi], + link_check_redirect_count_gte: params[:link_check_redirect_count_gte], + sample_group: sample_group_field, + sample_size: params[:sample], + source: params[:source], + scroll_id: params[:scroll_id], + page: page, + sort: sort, + random: params[:random], + ) end begin @@ -109,9 +140,12 @@ def index end # Results to return 
are either our sample group dois or the regular hit results + + # The total is just the length because for sample grouping we get everything back in one shot no pagination. + if sample_dois results = sample_dois - # The total is just the length because for sample grouping we get everything back in one shot no pagination. + total = sample_dois.length total_pages = 1 elsif page[:scroll].present? @@ -123,23 +157,30 @@ def index else results = response.results total = response.results.total - total_for_pages = page[:cursor].nil? ? [total.to_f, 10000].min : total.to_f + total_for_pages = + page[:cursor].nil? ? [total.to_f, 10_000].min : total.to_f total_pages = page[:size] > 0 ? (total_for_pages / page[:size]).ceil : 0 end if page[:scroll].present? options = {} options[:meta] = { - total: total, - "scroll-id" => response.scroll_id, + total: total, "scroll-id" => response.scroll_id }.compact options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/dois?" + { - "scroll-id" => response.scroll_id, - "page[scroll]" => page[:scroll], - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/dois?" + + { + "scroll-id" => response.scroll_id, + "page[scroll]" => page[:scroll], + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:is_collection] = true options[:params] = { @@ -152,38 +193,104 @@ def index # sparse fieldsets fields = fields_from_params(params) if fields - render json: DataciteDoiSerializer.new(results, options.merge(fields: fields)).serialized_json, status: :ok + render json: + DataciteDoiSerializer.new( + results, + options.merge(fields: fields), + ). 
+ serialized_json, + status: :ok else - render json: DataciteDoiSerializer.new(results, options).serialized_json, status: :ok + render json: + DataciteDoiSerializer.new(results, options).serialized_json, + status: :ok end else - states = total.positive? ? facet_by_key(response.aggregations.states.buckets) : nil - resource_types = total.positive? ? facet_by_combined_key(response.aggregations.resource_types.buckets) : nil - published = total.positive? ? facet_by_range(response.aggregations.published.buckets) : nil - created = total.positive? ? facet_by_key_as_string(response.aggregations.created.buckets) : nil - registered = total.positive? ? facet_by_key_as_string(response.aggregations.registered.buckets) : nil - providers = total.positive? ? facet_by_combined_key(response.aggregations.providers.buckets) : nil - clients = total.positive? ? facet_by_combined_key(response.aggregations.clients.buckets) : nil - prefixes = total.positive? ? facet_by_key(response.aggregations.prefixes.buckets) : nil - schema_versions = total.positive? ? facet_by_schema(response.aggregations.schema_versions.buckets) : nil - - affiliations = total.positive? ? facet_by_combined_key(response.aggregations.affiliations.buckets) : nil + states = + if total.positive? + facet_by_key(response.aggregations.states.buckets) + end + resource_types = + if total.positive? + facet_by_combined_key(response.aggregations.resource_types.buckets) + end + published = + if total.positive? + facet_by_range(response.aggregations.published.buckets) + end + created = + if total.positive? + facet_by_key_as_string(response.aggregations.created.buckets) + end + registered = + if total.positive? + facet_by_key_as_string(response.aggregations.registered.buckets) + end + providers = + if total.positive? + facet_by_combined_key(response.aggregations.providers.buckets) + end + clients = + if total.positive? + facet_by_combined_key(response.aggregations.clients.buckets) + end + prefixes = + if total.positive? 
+ facet_by_key(response.aggregations.prefixes.buckets) + end + schema_versions = + if total.positive? + facet_by_schema(response.aggregations.schema_versions.buckets) + end + + affiliations = + if total.positive? + facet_by_combined_key(response.aggregations.affiliations.buckets) + end # sources = total.positive? ? facet_by_key(response.aggregations.sources.buckets) : nil - subjects = total.positive? ? facet_by_key(response.aggregations.subjects.buckets) : nil - fields_of_science = total.positive? ? facet_by_fos(response.aggregations.fields_of_science.subject.buckets) : nil - certificates = total.positive? ? facet_by_key(response.aggregations.certificates.buckets) : nil - licenses = total.positive? ? facet_by_license(response.aggregations.licenses.buckets) : nil + subjects = + if total.positive? + facet_by_key(response.aggregations.subjects.buckets) + end + fields_of_science = + if total.positive? + facet_by_fos( + response.aggregations.fields_of_science.subject.buckets, + ) + end + certificates = + if total.positive? + facet_by_key(response.aggregations.certificates.buckets) + end + licenses = + if total.positive? + facet_by_license(response.aggregations.licenses.buckets) + end - link_checks_status = total.positive? ? facet_by_cumulative_year(response.aggregations.link_checks_status.buckets) : nil + link_checks_status = + if total.positive? + facet_by_cumulative_year( + response.aggregations.link_checks_status.buckets, + ) + end # links_with_schema_org = total.positive? ? facet_by_cumulative_year(response.aggregations.link_checks_has_schema_org.buckets) : nil # link_checks_schema_org_id = total.positive? ? response.aggregations.link_checks_schema_org_id.value : nil # link_checks_dc_identifier = total.positive? ? response.aggregations.link_checks_dc_identifier.value : nil # link_checks_citation_doi = total.positive? ? response.aggregations.link_checks_citation_doi.value : nil # links_checked = total.positive? ? 
response.aggregations.links_checked.value : nil - citations = total.positive? ? metric_facet_by_year(response.aggregations.citations.buckets) : nil - views = total.positive? ? metric_facet_by_year(response.aggregations.views.buckets) : nil - downloads = total.positive? ? metric_facet_by_year(response.aggregations.downloads.buckets) : nil + citations = + if total.positive? + metric_facet_by_year(response.aggregations.citations.buckets) + end + views = + if total.positive? + metric_facet_by_year(response.aggregations.views.buckets) + end + downloads = + if total.positive? + metric_facet_by_year(response.aggregations.downloads.buckets) + end respond_to do |format| format.json do @@ -191,7 +298,10 @@ def index options[:meta] = { total: total, "totalPages" => total_pages, - page: page[:cursor].nil? && page[:number].present? ? page[:number] : nil, + page: + if page[:cursor].nil? && page[:number].present? + page[:number] + end, states: states, "resourceTypes" => resource_types, created: created, @@ -220,38 +330,49 @@ def index options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/dois?" 
+ { - query: params[:query], - "provider-id" => params[:provider_id], - "consortium-id" => params[:consortium_id], - "client-id" => params[:client_id], - "funder-id" => params[:funder_id], - "affiliation-id" => params[:affiliation_id], - "resource-type-id" => params[:resource_type_id], - prefix: params[:prefix], - certificate: params[:certificate], - published: params[:published], - created: params[:created], - registered: params[:registered], - "has-citations" => params[:has_citations], - "has-references" => params[:has_references], - "has-parts" => params[:has_parts], - "has-part-of" => params[:has_part_of], - "has-versions" => params[:has_versions], - "has-version-of" => params[:has_version_of], - "has-views" => params[:has_views], - "has-downloads" => params[:has_downloads], - "has-person" => params[:has_person], - "has-affiliation" => params[:has_affiliation], - "has-funder" => params[:has_funder], - detail: params[:detail], - composite: params[:composite], - affiliation: params[:affiliation], - # The cursor link should be an array of values, but we want to encode it into a single string for the URL - "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, - "page[number]" => page[:cursor].nil? && page[:number].present? ? page[:number] + 1 : nil, - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/dois?" 
+ + { + query: params[:query], + "provider-id" => params[:provider_id], + "consortium-id" => params[:consortium_id], + "client-id" => params[:client_id], + "funder-id" => params[:funder_id], + "affiliation-id" => params[:affiliation_id], + "resource-type-id" => params[:resource_type_id], + prefix: params[:prefix], + certificate: params[:certificate], + published: params[:published], + created: params[:created], + registered: params[:registered], + "has-citations" => params[:has_citations], + "has-references" => params[:has_references], + "has-parts" => params[:has_parts], + "has-part-of" => params[:has_part_of], + "has-versions" => params[:has_versions], + "has-version-of" => params[:has_version_of], + "has-views" => params[:has_views], + "has-downloads" => params[:has_downloads], + "has-person" => params[:has_person], + "has-affiliation" => params[:has_affiliation], + "has-funder" => params[:has_funder], + detail: params[:detail], + composite: params[:composite], + affiliation: params[:affiliation], + # The cursor link should be an array of values, but we want to encode it into a single string for the URL + "page[cursor]" => + page[:cursor] ? make_cursor(results) : nil, + "page[number]" => + if page[:cursor].nil? && page[:number].present? + page[:number] + 1 + end, + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true @@ -266,25 +387,67 @@ def index # sparse fieldsets fields = fields_from_params(params) if fields - render json: DataciteDoiSerializer.new(results, options.merge(fields: fields)).serialized_json, status: :ok + render json: + DataciteDoiSerializer.new( + results, + options.merge(fields: fields), + ). + serialized_json, + status: :ok else - render json: DataciteDoiSerializer.new(results, options).serialized_json, status: :ok + render json: + DataciteDoiSerializer.new(results, options). 
+ serialized_json, + status: :ok end end format.citation do # fetch formatted citations - render citation: response.records.to_a, style: params[:style] || "apa", locale: params[:locale] || "en-US" + render citation: response.records.to_a, + style: params[:style] || "apa", + locale: params[:locale] || "en-US" + end + header = %w[ + doi + url + registered + state + resourceTypeGeneral + resourceType + title + author + publisher + publicationYear + ] + format.any( + :bibtex, + :citeproc, + :codemeta, + :crosscite, + :datacite, + :datacite_json, + :jats, + :ris, + :schema_org, + ) { render request.format.to_sym => response.records.to_a } + format.csv do + render request.format.to_sym => response.records.to_a, + header: header end - header = %w(doi url registered state resourceTypeGeneral resourceType title author publisher publicationYear) - format.any(:bibtex, :citeproc, :codemeta, :crosscite, :datacite, :datacite_json, :jats, :ris, :schema_org) { render request.format.to_sym => response.records.to_a } - format.csv { render request.format.to_sym => response.records.to_a, header: header } end end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") - - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) + + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -295,12 +458,20 @@ def show # response = DataciteDoi.find_by_id(params[:id]) # workaround until STI is enabled doi = DataciteDoi.where(type: "DataciteDoi").where(doi: params[:id]).first - fail ActiveRecord::RecordNotFound if doi.blank? || (doi.aasm_state != "findable" && not_allowed_by_doi_and_user(doi: doi, user: current_user)) + if doi.blank? 
|| + ( + doi.aasm_state != "findable" && + not_allowed_by_doi_and_user(doi: doi, user: current_user) + ) + fail ActiveRecord::RecordNotFound + end respond_to do |format| format.json do # doi = response.results.first - fail ActiveRecord::RecordNotFound if not_allowed_by_doi_and_user(doi: doi, user: current_user) + if not_allowed_by_doi_and_user(doi: doi, user: current_user) + fail ActiveRecord::RecordNotFound + end options = {} options[:include] = @include @@ -312,18 +483,44 @@ def show affiliation: params[:affiliation], } - render json: DataciteDoiSerializer.new(doi, options).serialized_json, status: :ok + render json: DataciteDoiSerializer.new(doi, options).serialized_json, + status: :ok end # doi = response.records.first - fail ActiveRecord::RecordNotFound if not_allowed_by_doi_and_user(doi: doi, user: current_user) + if not_allowed_by_doi_and_user(doi: doi, user: current_user) + fail ActiveRecord::RecordNotFound + end format.citation do # fetch formatted citation - render citation: doi, style: params[:style] || "apa", locale: params[:locale] || "en-US" + render citation: doi, + style: params[:style] || "apa", + locale: params[:locale] || "en-US" end - header = %w(doi url registered state resourceTypeGeneral resourceType title author publisher publicationYear) - format.any(:bibtex, :citeproc, :codemeta, :crosscite, :datacite, :datacite_json, :jats, :ris, :schema_org) { render request.format.to_sym => doi } + header = %w[ + doi + url + registered + state + resourceTypeGeneral + resourceType + title + author + publisher + publicationYear + ] + format.any( + :bibtex, + :citeproc, + :codemeta, + :crosscite, + :datacite, + :datacite_json, + :jats, + :ris, + :schema_org, + ) { render request.format.to_sym => doi } format.csv { render request.format.to_sym => doi, header: header } end end @@ -337,11 +534,10 @@ def validate options = {} options[:include] = @include options[:is_collection] = false - options[:params] = { - current_ability: current_ability, - } + 
options[:params] = { current_ability: current_ability } - render json: DataciteDoiSerializer.new(@doi, options).serialized_json, status: :ok + render json: DataciteDoiSerializer.new(@doi, options).serialized_json, + status: :ok else logger.info @doi.errors.messages render json: serialize_errors(@doi.errors.messages), status: :ok @@ -368,10 +564,14 @@ def create affiliation: params[:affiliation], } - render json: DataciteDoiSerializer.new(@doi, options).serialized_json, status: :created, location: @doi + render json: DataciteDoiSerializer.new(@doi, options).serialized_json, + status: :created, + location: @doi else logger.error @doi.errors.inspect - render json: serialize_errors(@doi.errors), include: @include, status: :unprocessable_entity + render json: serialize_errors(@doi.errors), + include: @include, + status: :unprocessable_entity end end @@ -379,8 +579,9 @@ def update @doi = DataciteDoi.where(doi: params[:id]).first exists = @doi.present? + # capture username and password for reuse in the handle system + if exists - # capture username and password for reuse in the handle system @doi.current_user = current_user if params.dig(:data, :attributes, :mode) == "transfer" @@ -391,7 +592,11 @@ def update else authorize! :update, @doi if safe_params[:schema_version].blank? - @doi.assign_attributes(safe_params.except(:doi, :client_id).merge(schema_version: @doi[:schema_version] || LAST_SCHEMA_VERSION)) + @doi.assign_attributes( + safe_params.except(:doi, :client_id).merge( + schema_version: @doi[:schema_version] || LAST_SCHEMA_VERSION, + ), + ) else @doi.assign_attributes(safe_params.except(:doi, :client_id)) end @@ -417,10 +622,13 @@ def update affiliation: params[:affiliation], } - render json: DataciteDoiSerializer.new(@doi, options).serialized_json, status: exists ? :ok : :created + render json: DataciteDoiSerializer.new(@doi, options).serialized_json, + status: exists ? 
:ok : :created else logger.error @doi.errors.messages - render json: serialize_errors(@doi.errors.messages), include: @include, status: :unprocessable_entity + render json: serialize_errors(@doi.errors.messages), + include: @include, + status: :unprocessable_entity end end @@ -434,16 +642,15 @@ def undo options = {} options[:include] = @include options[:is_collection] = false - options[:params] = { - current_ability: current_ability, - detail: true, - - } + options[:params] = { current_ability: current_ability, detail: true } - render json: DataciteDoiSerializer.new(@doi, options).serialized_json, status: :ok + render json: DataciteDoiSerializer.new(@doi, options).serialized_json, + status: :ok else logger.error @doi.errors.messages - render json: serialize_errors(@doi.errors.messages), include: @include, status: :unprocessable_entity + render json: serialize_errors(@doi.errors.messages), + include: @include, + status: :unprocessable_entity end end @@ -458,20 +665,33 @@ def destroy head :no_content else logger.error @doi.errors.inspect - render json: serialize_errors(@doi.errors), status: :unprocessable_entity + render json: serialize_errors(@doi.errors), + status: :unprocessable_entity end else response.headers["Allow"] = "HEAD, GET, POST, PATCH, PUT, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end end def random if params[:prefix].present? 
- dois = generate_random_dois(params[:prefix], number: params[:number], size: params[:size]) + dois = + generate_random_dois( + params[:prefix], + number: params[:number], size: params[:size], + ) render json: { dois: dois }.to_json else - render json: { errors: [{ status: "422", title: "Parameter prefix is required" }] }.to_json, status: :unprocessable_entity + render json: { + errors: [ + { status: "422", title: "Parameter prefix is required" }, + ], + }.to_json, + status: :unprocessable_entity end end @@ -481,7 +701,9 @@ def get_url authorize! :get_url, @doi - if !@doi.is_registered_or_findable? || %w(europ).include?(@doi.provider_id) || @doi.type == "OtherDoi" + if !@doi.is_registered_or_findable? || + %w[europ].include?(@doi.provider_id) || + @doi.type == "OtherDoi" url = @doi.url head :no_content && return if url.blank? else @@ -489,8 +711,20 @@ def get_url if response.status == 200 url = response.body.dig("data", "values", 0, "data", "value") - elsif response.status == 400 && response.body.dig("errors", 0, "title", "responseCode") == 301 - response = OpenStruct.new(status: 403, body: { "errors" => [{ "status" => 403, "title" => "SERVER NOT RESPONSIBLE FOR HANDLE" }] }) + elsif response.status == 400 && + response.body.dig("errors", 0, "title", "responseCode") == 301 + response = + OpenStruct.new( + status: 403, + body: { + "errors" => [ + { + "status" => 403, + "title" => "SERVER NOT RESPONSIBLE FOR HANDLE", + }, + ], + }, + ) url = nil else url = nil @@ -500,18 +734,25 @@ def get_url if url.present? render json: { url: url }.to_json, status: :ok else - render json: response.body.to_json, status: response.status || :bad_request + render json: response.body.to_json, + status: response.status || :bad_request end end def get_dois authorize! 
:get_urls, Doi - client = Client.where("datacentre.symbol = ?", current_user.uid.upcase).first + client = + Client.where("datacentre.symbol = ?", current_user.uid.upcase).first client_prefix = client.prefixes.first head :no_content && return if client_prefix.blank? - dois = DataciteDoi.get_dois(prefix: client_prefix.uid, username: current_user.uid.upcase, password: current_user.password) + dois = + DataciteDoi.get_dois( + prefix: client_prefix.uid, + username: current_user.uid.upcase, + password: current_user.password, + ) if dois.length.positive? render json: { dois: dois }.to_json, status: :ok else @@ -528,223 +769,451 @@ def set_url # legacy method def status - render json: { message: "Not Implemented." }.to_json, status: :not_implemented + render json: { message: "Not Implemented." }.to_json, + status: :not_implemented end protected + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - - @include = @include & %i[client media] - else - @include = [] + @include = @include & %i[client media] + else + @include = [] + end end - end private + def safe_params + if params[:data].blank? + fail JSON::ParserError, + "You need to provide a payload following the JSONAPI spec" + end - def safe_params - fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank? - - # alternateIdentifiers as alias for identifiers - # easier before strong_parameters are checked - if params.dig(:data, :attributes).present? && params.dig(:data, :attributes, :identifiers).blank? 
- params[:data][:attributes][:identifiers] = Array.wrap(params.dig(:data, :attributes, :alternateIdentifiers)).map do |a| - { identifier: a[:alternateIdentifier], - identifierType: a[:alternateIdentifierType] } + # alternateIdentifiers as alias for identifiers + # easier before strong_parameters are checked + if params.dig(:data, :attributes).present? && + params.dig(:data, :attributes, :identifiers).blank? + params[:data][:attributes][:identifiers] = + Array.wrap(params.dig(:data, :attributes, :alternateIdentifiers)). + map do |a| + { + identifier: a[:alternateIdentifier], + identifierType: a[:alternateIdentifierType], + } + end end - end - attributes = [ - :doi, - :confirmDoi, - :url, - :titles, - { titles: %i[title titleType lang] }, - :publisher, - :publicationYear, - :created, - :prefix, - :suffix, - :types, - { types: %i[resourceTypeGeneral resourceType schemaOrg bibtex citeproc ris] }, - :dates, - { dates: %i[date dateType dateInformation] }, - :subjects, - { subjects: %i[subject subjectScheme schemeUri valueUri lang] }, - :landingPage, - { landingPage: [ - :checked, + attributes = [ + :doi, + :confirmDoi, :url, - :status, - :contentType, - :error, - :redirectCount, - { redirectUrls: [] }, - :downloadLatency, - :hasSchemaOrg, - :schemaOrgId, - { schemaOrgId: [] }, - :dcIdentifier, - :citationDoi, - :bodyHasPid, - ] }, - :contentUrl, - { contentUrl: [] }, - :sizes, - { sizes: [] }, - :formats, - { formats: [] }, - :language, - :descriptions, - { descriptions: %i[description descriptionType lang] }, - :rightsList, - { rightsList: %i[rights rightsUri rightsIdentifier rightsIdentifierScheme schemeUri lang] }, - :xml, - :regenerate, - :source, - :version, - :metadataVersion, - :schemaVersion, - :state, - :isActive, - :reason, - :registered, - :updated, - :mode, - :event, - :regenerate, - :should_validate, - :client, - :creators, - { creators: [:nameType, { nameIdentifiers: %i[nameIdentifier nameIdentifierScheme schemeUri] }, :name, :givenName, :familyName, { 
affiliation: %i[name affiliationIdentifier affiliationIdentifierScheme schemeUri] }, :lang] }, - :contributors, - { contributors: [:nameType, { nameIdentifiers: %i[nameIdentifier nameIdentifierScheme schemeUri] }, :name, :givenName, :familyName, { affiliation: %i[name affiliationIdentifier affiliationIdentifierScheme schemeUri] }, :contributorType, :lang] }, - :identifiers, - { identifiers: %i[identifier identifierType] }, - :alternateIdentifiers, - { alternateIdentifiers: %i[alternateIdentifier alternateIdentifierType] }, - :relatedIdentifiers, - { relatedIdentifiers: %i[relatedIdentifier relatedIdentifierType relationType relatedMetadataScheme schemeUri schemeType resourceTypeGeneral relatedMetadataScheme schemeUri schemeType] }, - :fundingReferences, - { fundingReferences: %i[funderName funderIdentifier funderIdentifierType awardNumber awardUri awardTitle] }, - :geoLocations, - { geoLocations: [{ geoLocationPoint: %i[pointLongitude pointLatitude] }, { geoLocationBox: %i[westBoundLongitude eastBoundLongitude southBoundLatitude northBoundLatitude] }, :geoLocationPlace] }, - :container, - { container: %i[type identifier identifierType title volume issue firstPage lastPage] }, - :published, - :downloadsOverTime, - { downloadsOverTime: %i[yearMonth total] }, - :viewsOverTime, - { viewsOverTime: %i[yearMonth total] }, - :citationsOverTime, - { citationsOverTime: %i[year total] }, - :citationCount, - :downloadCount, - :partCount, - :partOfCount, - :referenceCount, - :versionCount, - :versionOfCount, - :viewCount, - ] - relationships = [{ client: [data: %i[type id]] }] - - # default values for attributes stored as JSON - defaults = { data: { titles: [], descriptions: [], types: {}, container: {}, dates: [], subjects: [], rightsList: [], creators: [], contributors: [], sizes: [], formats: [], contentUrl: [], identifiers: [], relatedIdentifiers: [], fundingReferences: [], geoLocations: [] } } - - p = params.require(:data).permit(:type, :id, attributes: attributes, 
relationships: relationships).reverse_merge(defaults) - client_id = p.dig("relationships", "client", "data", "id") || current_user.try(:client_id) - p = p.fetch("attributes").merge(client_id: client_id) - - # extract attributes from xml field and merge with attributes provided directly - xml = p[:xml].present? ? Base64.decode64(p[:xml]).force_encoding("UTF-8") : nil - - if xml.present? - # remove optional utf-8 bom - xml.gsub!("\xEF\xBB\xBF", "") - - # remove leading and trailing whitespace - xml = xml.strip - end + :titles, + { titles: %i[title titleType lang] }, + :publisher, + :publicationYear, + :created, + :prefix, + :suffix, + :types, + { + types: %i[ + resourceTypeGeneral + resourceType + schemaOrg + bibtex + citeproc + ris + ], + }, + :dates, + { dates: %i[date dateType dateInformation] }, + :subjects, + { subjects: %i[subject subjectScheme schemeUri valueUri lang] }, + :landingPage, + { + landingPage: [ + :checked, + :url, + :status, + :contentType, + :error, + :redirectCount, + { redirectUrls: [] }, + :downloadLatency, + :hasSchemaOrg, + :schemaOrgId, + { schemaOrgId: [] }, + :dcIdentifier, + :citationDoi, + :bodyHasPid, + ], + }, + :contentUrl, + { contentUrl: [] }, + :sizes, + { sizes: [] }, + :formats, + { formats: [] }, + :language, + :descriptions, + { descriptions: %i[description descriptionType lang] }, + :rightsList, + { + rightsList: %i[ + rights + rightsUri + rightsIdentifier + rightsIdentifierScheme + schemeUri + lang + ], + }, + :xml, + :regenerate, + :source, + :version, + :metadataVersion, + :schemaVersion, + :state, + :isActive, + :reason, + :registered, + :updated, + :mode, + :event, + :regenerate, + :should_validate, + :client, + :creators, + { + creators: [ + :nameType, + { + nameIdentifiers: %i[nameIdentifier nameIdentifierScheme schemeUri], + }, + :name, + :givenName, + :familyName, + { + affiliation: %i[ + name + affiliationIdentifier + affiliationIdentifierScheme + schemeUri + ], + }, + :lang, + ], + }, + :contributors, + { + 
contributors: [ + :nameType, + { + nameIdentifiers: %i[nameIdentifier nameIdentifierScheme schemeUri], + }, + :name, + :givenName, + :familyName, + { + affiliation: %i[ + name + affiliationIdentifier + affiliationIdentifierScheme + schemeUri + ], + }, + :contributorType, + :lang, + ], + }, + :identifiers, + { identifiers: %i[identifier identifierType] }, + :alternateIdentifiers, + { alternateIdentifiers: %i[alternateIdentifier alternateIdentifierType] }, + :relatedIdentifiers, + { + relatedIdentifiers: %i[ + relatedIdentifier + relatedIdentifierType + relationType + relatedMetadataScheme + schemeUri + schemeType + resourceTypeGeneral + relatedMetadataScheme + schemeUri + schemeType + ], + }, + :fundingReferences, + { + fundingReferences: %i[ + funderName + funderIdentifier + funderIdentifierType + awardNumber + awardUri + awardTitle + ], + }, + :geoLocations, + { + geoLocations: [ + { geoLocationPoint: %i[pointLongitude pointLatitude] }, + { + geoLocationBox: %i[ + westBoundLongitude + eastBoundLongitude + southBoundLatitude + northBoundLatitude + ], + }, + :geoLocationPlace, + ], + }, + :container, + { + container: %i[ + type + identifier + identifierType + title + volume + issue + firstPage + lastPage + ], + }, + :published, + :downloadsOverTime, + { downloadsOverTime: %i[yearMonth total] }, + :viewsOverTime, + { viewsOverTime: %i[yearMonth total] }, + :citationsOverTime, + { citationsOverTime: %i[year total] }, + :citationCount, + :downloadCount, + :partCount, + :partOfCount, + :referenceCount, + :versionCount, + :versionOfCount, + :viewCount, + ] + relationships = [{ client: [data: %i[type id]] }] + + # default values for attributes stored as JSON + defaults = { + data: { + titles: [], + descriptions: [], + types: {}, + container: {}, + dates: [], + subjects: [], + rightsList: [], + creators: [], + contributors: [], + sizes: [], + formats: [], + contentUrl: [], + identifiers: [], + relatedIdentifiers: [], + fundingReferences: [], + geoLocations: [], + }, + } - 
Array.wrap(params[:creators])&.each do |c| - fail(ActionController::UnpermittedParameters, ["nameIdentifiers must be an Array"]) if c[:nameIdentifiers]&.respond_to?(:keys) - end + p = + params.require(:data).permit( + :type, + :id, + attributes: attributes, relationships: relationships, + ). + reverse_merge(defaults) + client_id = + p.dig("relationships", "client", "data", "id") || + current_user.try(:client_id) + p = p.fetch("attributes").merge(client_id: client_id) + + # extract attributes from xml field and merge with attributes provided directly + xml = + p[:xml].present? ? Base64.decode64(p[:xml]).force_encoding("UTF-8") : nil + + if xml.present? + # remove optional utf-8 bom + xml.gsub!("\xEF\xBB\xBF", "") + + # remove leading and trailing whitespace + xml = xml.strip + end - Array.wrap(params[:contributors])&.each do |c| - fail(ActionController::UnpermittedParameters, ["nameIdentifiers must be an Array"]) if c[:nameIdentifiers]&.respond_to?(:keys) - end + Array.wrap(params[:creators])&.each do |c| + if c[:nameIdentifiers]&.respond_to?(:keys) + fail( + ActionController::UnpermittedParameters, + ["nameIdentifiers must be an Array"], + ) + end + end - meta = xml.present? ? parse_xml(xml, doi: p[:doi]) : {} - p[:schemaVersion] = METADATA_FORMATS.include?(meta["from"]) ? LAST_SCHEMA_VERSION : p[:schemaVersion] - xml = meta["string"] - - # if metadata for DOIs from other registration agencies are not found - fail ActiveRecord::RecordNotFound if meta["state"] == "not_found" - - read_attrs = [p[:creators], p[:contributors], p[:titles], p[:publisher], - p[:publicationYear], p[:types], p[:descriptions], p[:container], p[:sizes], - p[:formats], p[:version], p[:language], p[:dates], p[:identifiers], - p[:relatedIdentifiers], p[:fundingReferences], p[:geoLocations], p[:rightsList], - p[:subjects], p[:contentUrl], p[:schemaVersion]].compact - - # generate random DOI if no DOI is provided - # make random DOI predictable in test - if p[:doi].blank? && p[:prefix].present? 
&& Rails.env.test? - p[:doi] = generate_random_dois(p[:prefix], number: 123456).first - elsif p[:doi].blank? && p[:prefix].present? - p[:doi] = generate_random_dois(p[:prefix]).first - end + Array.wrap(params[:contributors])&.each do |c| + if c[:nameIdentifiers]&.respond_to?(:keys) + fail( + ActionController::UnpermittedParameters, + ["nameIdentifiers must be an Array"], + ) + end + end - # replace DOI, but otherwise don't touch the XML - # use Array.wrap(read_attrs.first) as read_attrs may also be [[]] - if meta["from"] == "datacite" && Array.wrap(read_attrs.first).blank? - xml = replace_doi(xml, doi: p[:doi] || meta["doi"]) - elsif xml.present? || Array.wrap(read_attrs.first).present? - regenerate = true - end + meta = xml.present? ? parse_xml(xml, doi: p[:doi]) : {} + p[:schemaVersion] = + if METADATA_FORMATS.include?(meta["from"]) + LAST_SCHEMA_VERSION + else + p[:schemaVersion] + end + xml = meta["string"] + + # if metadata for DOIs from other registration agencies are not found + fail ActiveRecord::RecordNotFound if meta["state"] == "not_found" + + read_attrs = [ + p[:creators], + p[:contributors], + p[:titles], + p[:publisher], + p[:publicationYear], + p[:types], + p[:descriptions], + p[:container], + p[:sizes], + p[:formats], + p[:version], + p[:language], + p[:dates], + p[:identifiers], + p[:relatedIdentifiers], + p[:fundingReferences], + p[:geoLocations], + p[:rightsList], + p[:subjects], + p[:contentUrl], + p[:schemaVersion], + ].compact + + # generate random DOI if no DOI is provided + # make random DOI predictable in test + if p[:doi].blank? && p[:prefix].present? && Rails.env.test? + p[:doi] = generate_random_dois(p[:prefix], number: 123_456).first + elsif p[:doi].blank? && p[:prefix].present? + p[:doi] = generate_random_dois(p[:prefix]).first + end - p[:xml] = xml if xml.present? 
+ # replace DOI, but otherwise don't touch the XML + # use Array.wrap(read_attrs.first) as read_attrs may also be [[]] + if meta["from"] == "datacite" && Array.wrap(read_attrs.first).blank? + xml = replace_doi(xml, doi: p[:doi] || meta["doi"]) + elsif xml.present? || Array.wrap(read_attrs.first).present? + regenerate = true + end - read_attrs_keys = %i[url creators contributors titles publisher - publicationYear types descriptions container sizes - formats language dates identifiers relatedIdentifiers - fundingReferences geoLocations rightsList agency - subjects contentUrl schemaVersion] + p[:xml] = xml if xml.present? + + read_attrs_keys = %i[ + url + creators + contributors + titles + publisher + publicationYear + types + descriptions + container + sizes + formats + language + dates + identifiers + relatedIdentifiers + fundingReferences + geoLocations + rightsList + agency + subjects + contentUrl + schemaVersion + ] + + # merge attributes from xml into regular attributes + # make sure we don't accidentally set any attributes to nil + read_attrs_keys.each do |attr| + if p.has_key?(attr) || meta.has_key?(attr.to_s.underscore) + p.merge!( + attr.to_s.underscore => + p[attr] || meta[attr.to_s.underscore] || p[attr], + ) + end + end - # merge attributes from xml into regular attributes - # make sure we don't accidentally set any attributes to nil - read_attrs_keys.each do |attr| - p.merge!(attr.to_s.underscore => p[attr] || meta[attr.to_s.underscore] || p[attr]) if p.has_key?(attr) || meta.has_key?(attr.to_s.underscore) - end + # handle version metadata + if p.has_key?(:version) || meta["version_info"].present? + p[:version_info] = p[:version] || meta["version_info"] + end - # handle version metadata - p[:version_info] = p[:version] || meta["version_info"] if p.has_key?(:version) || meta["version_info"].present? 
- - # only update landing_page info if something is received via API to not overwrite existing data - p[:landing_page] = p[:landingPage] if p[:landingPage].present? - - p.merge( - regenerate: p[:regenerate] || regenerate, - ).except( - # ignore camelCase keys, and read-only keys - :confirmDoi, :prefix, :suffix, :publicationYear, :alternateIdentifiers, - :rightsList, :relatedIdentifiers, :fundingReferences, :geoLocations, - :metadataVersion, :schemaVersion, :state, :mode, :isActive, :landingPage, - :created, :registered, :updated, :published, :lastLandingPage, :version, - :lastLandingPageStatus, :lastLandingPageStatusCheck, - :lastLandingPageStatusResult, :lastLandingPageContentType, :contentUrl, - :viewsOverTime, :downloadsOverTime, :citationsOverTime, :citationCount, :downloadCount, - :partCount, :partOfCount, :referenceCount, :versionCount, :versionOfCount, :viewCount - ) - end + # only update landing_page info if something is received via API to not overwrite existing data + p[:landing_page] = p[:landingPage] if p[:landingPage].present? + + p.merge(regenerate: p[:regenerate] || regenerate).except( + # ignore camelCase keys, and read-only keys + :confirmDoi, + :prefix, + :suffix, + :publicationYear, + :alternateIdentifiers, + :rightsList, + :relatedIdentifiers, + :fundingReferences, + :geoLocations, + :metadataVersion, + :schemaVersion, + :state, + :mode, + :isActive, + :landingPage, + :created, + :registered, + :updated, + :published, + :lastLandingPage, + :version, + :lastLandingPageStatus, + :lastLandingPageStatusCheck, + :lastLandingPageStatusResult, + :lastLandingPageContentType, + :contentUrl, + :viewsOverTime, + :downloadsOverTime, + :citationsOverTime, + :citationCount, + :downloadCount, + :partCount, + :partOfCount, + :referenceCount, + :versionCount, + :versionOfCount, + :viewCount, + ) + end - def set_raven_context - return nil if params.dig(:data, :attributes, :xml).blank? 
+ def set_raven_context + return nil if params.dig(:data, :attributes, :xml).blank? - Raven.extra_context metadata: Base64.decode64(params.dig(:data, :attributes, :xml)) - end + Raven.extra_context metadata: + Base64.decode64(params.dig(:data, :attributes, :xml)) + end end diff --git a/app/controllers/events_controller.rb b/app/controllers/events_controller.rb index cb82dd8b7..b9387747e 100644 --- a/app/controllers/events_controller.rb +++ b/app/controllers/events_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class EventsController < ApplicationController include Identifiable @@ -7,16 +9,18 @@ class EventsController < ApplicationController prepend_before_action :authenticate_user!, except: %i[index show] before_action :detect_crawler - before_action :load_event, only: [:show] + before_action :load_event, only: %i[show] before_action :set_include, only: %i[index show create update] - authorize_resource only: [:destroy] + authorize_resource only: %i[destroy] def create - @event = Event.where(subj_id: safe_params[:subj_id]). - where(obj_id: safe_params[:obj_id]). - where(source_id: safe_params[:source_id]). - where(relation_type_id: safe_params[:relation_type_id]). - first + @event = + Event.where(subj_id: safe_params[:subj_id]).where( + obj_id: safe_params[:obj_id], + ). + where(source_id: safe_params[:source_id]). + where(relation_type_id: safe_params[:relation_type_id]). + first exists = @event.present? # create event if it doesn't exist already @@ -28,10 +32,14 @@ def create options = {} options[:is_collection] = false - render json: EventSerializer.new(@event, options).serialized_json, status: exists ? :ok : :created + render json: EventSerializer.new(@event, options).serialized_json, + status: exists ? 
:ok : :created else logger.error @event.errors.inspect - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end @@ -49,10 +57,14 @@ def update options = {} options[:is_collection] = false - render json: EventSerializer.new(@event, options).serialized_json, status: exists ? :ok : :created + render json: EventSerializer.new(@event, options).serialized_json, + status: exists ? :ok : :created else logger.error @event.errors.inspect - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end @@ -62,56 +74,71 @@ def show options[:include] = @include options[:is_collection] = false - render json: EventSerializer.new(@event, options).serialized_json, status: :ok + render json: EventSerializer.new(@event, options).serialized_json, + status: :ok end def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "obj_id" then { "obj_id" => { order: "asc" } } - when "-obj_id" then { "obj_id" => { order: "desc" } } - when "total" then { "total" => { order: "asc" } } - when "-total" then { "total" => { order: "desc" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - when "updated" then { updated_at: { order: "asc" } } - when "-updated" then { updated_at: { order: "desc" } } - when "relation_type_id" then { relation_type_id: { order: "asc" } } - else { updated_at: { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "obj_id" + { "obj_id" => { order: "asc" } } + when "-obj_id" + { "obj_id" => { order: "desc" } } + when 
"total" + { "total" => { order: "asc" } } + when "-total" + { "total" => { order: "desc" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + when "updated" + { updated_at: { order: "asc" } } + when "-updated" + { updated_at: { order: "desc" } } + when "relation_type_id" + { relation_type_id: { order: "asc" } } + else + { updated_at: { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Event.find_by(id: params[:id]) + response = if params[:id].present? + Event.find_by(id: params[:id]) elsif params[:ids].present? - response = Event.find_by_id(params[:ids], page: page, sort: sort) + Event.find_by_id(params[:ids], page: page, sort: sort) else - response = Event.query(params[:query], - subj_id: params[:subj_id], - obj_id: params[:obj_id], - source_doi: params[:source_doi], - target_doi: params[:target_doi], - doi: params[:doi_id] || params[:doi], - orcid: params[:orcid], - prefix: params[:prefix], - subtype: params[:subtype], - citation_type: params[:citation_type], - source_id: params[:source_id], - registrant_id: params[:registrant_id], - relation_type_id: params[:relation_type_id], - source_relation_type_id: params[:source_relation_type_id], - target_relation_type_id: params[:target_relation_type_id], - issn: params[:issn], - publication_year: params[:publication_year], - occurred_at: params[:occurred_at], - year_month: params[:year_month], - aggregations: params[:aggregations], - unique: params[:unique], - state_event: params[:state], - scroll_id: params[:scroll_id], - page: page, - sort: sort) + Event.query( + params[:query], + subj_id: params[:subj_id], + obj_id: params[:obj_id], + source_doi: params[:source_doi], + target_doi: params[:target_doi], + doi: params[:doi_id] || params[:doi], + orcid: params[:orcid], + prefix: params[:prefix], + subtype: params[:subtype], + citation_type: params[:citation_type], + source_id: params[:source_id], + registrant_id: 
params[:registrant_id], + relation_type_id: params[:relation_type_id], + source_relation_type_id: params[:source_relation_type_id], + target_relation_type_id: params[:target_relation_type_id], + issn: params[:issn], + publication_year: params[:publication_year], + occurred_at: params[:occurred_at], + year_month: params[:year_month], + aggregations: params[:aggregations], + unique: params[:unique], + state_event: params[:state], + scroll_id: params[:scroll_id], + page: page, + sort: sort, + ) end if page[:scroll].present? @@ -119,33 +146,63 @@ def index total = response.total else total = response.results.total - total_for_pages = page[:cursor].nil? ? [total.to_f, 10000].min : total.to_f - total_pages = page[:size].positive? ? (total_for_pages / page[:size]).ceil : 0 + total_for_pages = + page[:cursor].nil? ? [total.to_f, 10_000].min : total.to_f + total_pages = + page[:size].positive? ? (total_for_pages / page[:size]).ceil : 0 end if page[:scroll].present? options = {} options[:meta] = { - total: total, - "scroll-id" => response.scroll_id, + total: total, "scroll-id" => response.scroll_id }.compact options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/events?" + { - "scroll-id" => response.scroll_id, - "page[scroll]" => page[:scroll], - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/events?" + + { + "scroll-id" => response.scroll_id, + "page[scroll]" => page[:scroll], + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:is_collection] = true - render json: EventSerializer.new(results, options).serialized_json, status: :ok + render json: EventSerializer.new(results, options).serialized_json, + status: :ok else - sources = total.positive? ? facet_by_source(response.response.aggregations.sources.buckets) : nil - prefixes = total.positive? ? 
facet_by_source(response.response.aggregations.prefixes.buckets) : nil - citation_types = total.positive? ? facet_by_citation_type(response.response.aggregations.citation_types.buckets) : nil - relation_types = total.positive? ? facet_by_relation_type(response.response.aggregations.relation_types.buckets) : nil - registrants = total.positive? ? facet_by_registrants(response.response.aggregations.registrants.buckets) : nil + sources = + if total.positive? + facet_by_source(response.response.aggregations.sources.buckets) + end + prefixes = + if total.positive? + facet_by_source(response.response.aggregations.prefixes.buckets) + end + citation_types = + if total.positive? + facet_by_citation_type( + response.response.aggregations.citation_types.buckets, + ) + end + relation_types = + if total.positive? + facet_by_relation_type( + response.response.aggregations.relation_types.buckets, + ) + end + registrants = + if total.positive? + facet_by_registrants( + response.response.aggregations.registrants.buckets, + ) + end results = response.results @@ -154,7 +211,8 @@ def index options[:meta] = { total: total, "totalPages" => total_pages, - page: page[:cursor].nil? && page[:number].present? ? page[:number] : nil, + page: + page[:cursor].nil? && page[:number].present? ? page[:number] : nil, sources: sources, prefixes: prefixes, "citationTypes" => citation_types, @@ -164,30 +222,41 @@ def index options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/events?" 
+ { - "query" => params[:query], - "subj-id" => params[:subj_id], - "obj-id" => params[:obj_id], - "doi" => params[:doi], - "orcid" => params[:orcid], - "prefix" => params[:prefix], - "subtype" => params[:subtype], - "citation_type" => params[:citation_type], - "source-id" => params[:source_id], - "relation-type-id" => params[:relation_type_id], - "issn" => params[:issn], - "registrant-id" => params[:registrant_id], - "publication-year" => params[:publication_year], - "year-month" => params[:year_month], - "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, - "page[number]" => page[:cursor].nil? && page[:number].present? ? page[:number] + 1 : nil, - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/events?" + + { + "query" => params[:query], + "subj-id" => params[:subj_id], + "obj-id" => params[:obj_id], + "doi" => params[:doi], + "orcid" => params[:orcid], + "prefix" => params[:prefix], + "subtype" => params[:subtype], + "citation_type" => params[:citation_type], + "source-id" => params[:source_id], + "relation-type-id" => params[:relation_type_id], + "issn" => params[:issn], + "registrant-id" => params[:registrant_id], + "publication-year" => params[:publication_year], + "year-month" => params[:year_month], + "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, + "page[number]" => + if page[:cursor].nil? && page[:number].present? + page[:number] + 1 + end, + "page[size]" => page[:size], + }.compact. 
+ to_query + end, }.compact options[:is_collection] = true - render json: EventSerializer.new(results, options).serialized_json, status: :ok + render json: EventSerializer.new(results, options).serialized_json, + status: :ok end end @@ -198,35 +267,75 @@ def destroy if @event.destroy head :no_content else - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end protected + def load_event + response = Event.find_by(id: params[:id]) + @event = response.results.first + fail ActiveRecord::RecordNotFound if @event.blank? + end - def load_event - response = Event.find_by(id: params[:id]) - @event = response.results.first - fail ActiveRecord::RecordNotFound if @event.blank? - end - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[subj obj] - else - @include = [] + def set_include + if params[:include].present? 
+ @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[subj obj] + else + @include = [] + end end - end private - - def safe_params - nested_params = [:id, :name, { author: ["givenName", "familyName", :name] }, :funder, { funder: ["@id", "@type", :name] }, "alternateName", "proxyIdentifiers", { "proxyIdentifiers" => [] }, :publisher, :periodical, {  periodical: %i[type id name issn] }, "volumeNumber", "issueNumber", :pagination, :issn, "datePublished", "dateModified", "registrantId", :doi, :url, :type] - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:id, "messageAction", "sourceToken", :callback, "subjId", "objId", "relationTypeId", "sourceId", :total, :license, "occurredAt", :subj, :obj, subj: nested_params, obj: nested_params], - keys: { id: :uuid } - ) - end + def safe_params + nested_params = [ + :id, + :name, + { author: ["givenName", "familyName", :name] }, + :funder, + { funder: ["@id", "@type", :name] }, + "alternateName", + "proxyIdentifiers", + { "proxyIdentifiers" => [] }, + :publisher, + :periodical, + {  periodical: %i[type id name issn] }, + "volumeNumber", + "issueNumber", + :pagination, + :issn, + "datePublished", + "dateModified", + "registrantId", + :doi, + :url, + :type, + ] + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [ + :id, + "messageAction", + "sourceToken", + :callback, + "subjId", + "objId", + "relationTypeId", + "sourceId", + :total, + :license, + "occurredAt", + :subj, + :obj, + { subj: nested_params, obj: nested_params }, + ], + keys: { id: :uuid }, + ) + end end diff --git a/app/controllers/exports_controller.rb b/app/controllers/exports_controller.rb index 74ad48857..cc476e5bc 100644 --- a/app/controllers/exports_controller.rb +++ b/app/controllers/exports_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ExportsController < ApplicationController include ActionController::MimeResponds @@ -13,9 +15,7 @@ 
class ExportsController < ApplicationController }.freeze REGIONS = { - "APAC" => "Asia Pacific", - "EMEA" => "EMEA", - "AMER" => "Americas", + "APAC" => "Asia Pacific", "EMEA" => "EMEA", "AMER" => "Americas" }.freeze def contacts @@ -23,8 +23,15 @@ def contacts begin # Loop through all providers - page = { size: 1000, number: 1 } - response = Provider.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: 1 } + response = + Provider.query( + nil, + page: page, + from_date: params[:from_date], + until_date: params[:until_date], + include_deleted: true, + ) providers = response.results.to_a total = response.results.total @@ -33,87 +40,158 @@ def contacts # keep going for all pages page_num = 2 while page_num <= total_pages - page = { size: 1000, number: page_num } - response = Provider.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: page_num } + response = + Provider.query( + nil, + page: page, + from_date: params[:from_date], + until_date: params[:until_date], + include_deleted: true, + ) providers = providers + response.results.to_a page_num += 1 end - headers = %w( - fabricaAccountId - fabricaId - email - firstName - lastName - type - ) + headers = %w[fabricaAccountId fabricaId email firstName lastName type] csv = headers.to_csv # Use a hashmap for the contacts to avoid duplicated contacts = {} - add_contact = Proc.new do |contacts, email, id, firstname, lastname, type| - if email - fabrica_id = id + "-" + email - unless contacts.has_key?(fabrica_id) - contacts[fabrica_id] = { - "fabricaAccountId" => id, - "fabricaId" => fabrica_id, - "email" => email, - "firstName" => firstname, - "lastName" => lastname.presence || email, - } - end - - if contacts[fabrica_id].has_key?("type") - contacts[fabrica_id]["type"] += ";" + type - else - contacts[fabrica_id]["type"] = type + add_contact = + 
Proc.new do |contacts, email, id, firstname, lastname, type| + if email + fabrica_id = id + "-" + email + unless contacts.has_key?(fabrica_id) + contacts[fabrica_id] = { + "fabricaAccountId" => id, + "fabricaId" => fabrica_id, + "email" => email, + "firstName" => firstname, + "lastName" => lastname.presence || email, + } + end + + if contacts[fabrica_id].has_key?("type") + contacts[fabrica_id]["type"] += ";" + type + else + contacts[fabrica_id]["type"] = type + end end end - end providers.each do |provider| if params[:type].blank? || params[:type] == "technical" - add_contact.call(contacts, provider.technical_contact.email, provider.symbol, provider.technical_contact.given_name, provider.technical_contact.family_name, "technical") if provider.technical_contact.present? - add_contact.call(contacts, provider.secondary_technical_contact.email, provider.symbol, provider.secondary_technical_contact.given_name, provider.secondary_technical_contact.family_name, "secondaryTechnical") if provider.secondary_technical_contact.present? + if provider.technical_contact.present? + add_contact.call( + contacts, + provider.technical_contact.email, + provider.symbol, + provider.technical_contact.given_name, + provider.technical_contact.family_name, + "technical", + ) + end + if provider.secondary_technical_contact.present? + add_contact.call( + contacts, + provider.secondary_technical_contact.email, + provider.symbol, + provider.secondary_technical_contact.given_name, + provider.secondary_technical_contact.family_name, + "secondaryTechnical", + ) + end end + if params[:type].blank? || params[:type] == "service" - add_contact.call(contacts, provider.service_contact.email, provider.symbol, provider.service_contact.given_name, provider.service_contact.family_name, "service") if provider.service_contact.present? 
- add_contact.call(contacts, provider.secondary_service_contact.email, provider.symbol, provider.secondary_service_contact.given_name, provider.secondary_service_contact.family_name, "secondaryService") if provider.secondary_service_contact.present? + if provider.service_contact.present? + add_contact.call( + contacts, + provider.service_contact.email, + provider.symbol, + provider.service_contact.given_name, + provider.service_contact.family_name, + "service", + ) + end + if provider.secondary_service_contact.present? + add_contact.call( + contacts, + provider.secondary_service_contact.email, + provider.symbol, + provider.secondary_service_contact.given_name, + provider.secondary_service_contact.family_name, + "secondaryService", + ) + end end + if params[:type].blank? || params[:type] == "voting" - add_contact.call(contacts, provider.voting_contact.email, provider.symbol, provider.voting_contact.given_name, provider.voting_contact.family_name, "voting") if provider.voting_contact.present? + if provider.voting_contact.present? + add_contact.call( + contacts, + provider.voting_contact.email, + provider.symbol, + provider.voting_contact.given_name, + provider.voting_contact.family_name, + "voting", + ) + end end + if params[:type].blank? || params[:type] == "billing" - add_contact.call(contacts, provider.billing_contact.email, provider.symbol, provider.billing_contact.given_name, provider.billing_contact.family_name, "billing") if provider.billing_contact.present? - add_contact.call(contacts, provider.secondary_billing_contact.email, provider.symbol, provider.secondary_billing_contact.given_name, provider.secondary_billing_contact.family_name, "secondaryBilling") if provider.secondary_billing_contact.present? + if provider.billing_contact.present? 
+ add_contact.call( + contacts, + provider.billing_contact.email, + provider.symbol, + provider.billing_contact.given_name, + provider.billing_contact.family_name, + "billing", + ) + end + if provider.secondary_billing_contact.present? + add_contact.call( + contacts, + provider.secondary_billing_contact.email, + provider.symbol, + provider.secondary_billing_contact.given_name, + provider.secondary_billing_contact.family_name, + "secondaryBilling", + ) + end end end contacts.each do |_, contact| - csv += CSV.generate_line [ - contact["fabricaAccountId"], - contact["fabricaId"], - contact["email"], - contact["firstName"], - contact["lastName"], - contact["type"], - ] + csv += + CSV.generate_line [ + contact["fabricaAccountId"], + contact["fabricaId"], + contact["email"], + contact["firstName"], + contact["lastName"], + contact["type"], + ] end - filename = if params[:until_date] - "contacts-#{params.fetch(:type, 'all')}-#{params[:until_date]}.csv" - else - "contacts-#{params.fetch(:type, 'all')}-#{Date.today}.csv" - end + filename = + if params[:until_date] + "contacts-#{params.fetch(:type, 'all')}-#{params[:until_date]}.csv" + else + "contacts-#{params.fetch(:type, 'all')}-#{Date.today}.csv" + end send_data csv, filename: filename - rescue StandardError, Elasticsearch::Transport::Transport::Errors::BadRequest => e + rescue StandardError, + Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - render json: { "errors" => { "title" => e.message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => e.message } }.to_json, + status: :bad_request end end @@ -122,8 +200,15 @@ def organizations begin # Loop through all providers - page = { size: 1000, number: 1 } - response = Provider.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: 1 } + response = + Provider.query( + nil, + page: page, + from_date: params[:from_date], + 
until_date: params[:until_date], + include_deleted: true, + ) providers = response.results.to_a total = response.results.total @@ -132,8 +217,15 @@ def organizations # keep going for all pages page_num = 2 while page_num <= total_pages - page = { size: 1000, number: page_num } - response = Provider.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: page_num } + response = + Provider.query( + nil, + page: page, + from_date: params[:from_date], + until_date: params[:until_date], + include_deleted: true, + ) providers = providers + response.results.to_a page_num += 1 end @@ -171,11 +263,15 @@ def organizations row = { accountName: provider.name, fabricaAccountId: provider.symbol, - parentFabricaAccountId: provider.consortium_id.present? ? provider.consortium_id.upcase : nil, + parentFabricaAccountId: + if provider.consortium_id.present? + provider.consortium_id.upcase + end, isActive: provider.deleted_at.blank?, accountDescription: provider.description, accountWebsite: provider.website, - region: provider.region.present? ? export_region(provider.region) : nil, + region: + provider.region.present? ? export_region(provider.region) : nil, focusArea: provider.focus_area, sector: provider.organization_type, accountType: export_member_type(provider.member_type), @@ -186,29 +282,38 @@ def organizations billingCity: provider.billing_information.city, billingDepartment: provider.billing_information.department, billingOrganization: provider.billing_information.organization, - billingStateCode: provider.billing_information.state.present? ? provider.billing_information.state.split("-").last : nil, + billingStateCode: + if provider.billing_information.state.present? 
+ provider.billing_information.state.split("-").last + end, billingCountryCode: provider.billing_information.country, twitter: provider.twitter_handle, rorId: provider.ror_id, created: export_date(provider.created), modified: export_date(provider.updated), - deleted: provider.deleted_at.present? ? export_date(provider.deleted_at) : nil, + deleted: + if provider.deleted_at.present? + export_date(provider.deleted_at) + end, }.values csv += CSV.generate_line row end - filename = if params[:until_date] - "organizations-#{params[:until_date]}.csv" - else - "organizations-#{Date.today}.csv" - end + filename = + if params[:until_date] + "organizations-#{params[:until_date]}.csv" + else + "organizations-#{Date.today}.csv" + end send_data csv, filename: filename - rescue StandardError, Elasticsearch::Transport::Transport::Errors::BadRequest => e + rescue StandardError, + Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - render json: { "errors" => { "title" => e.message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => e.message } }.to_json, + status: :bad_request end end @@ -216,8 +321,15 @@ def repositories # authorize! 
:export, :repositories # Loop through all clients - page = { size: 1000, number: 1 } - response = Client.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: 1 } + response = + Client.query( + nil, + page: page, + from_date: params[:from_date], + until_date: params[:until_date], + include_deleted: true, + ) clients = response.results.to_a total = response.results.total @@ -226,8 +338,15 @@ def repositories # keep going for all pages page_num = 2 while page_num <= total_pages - page = { size: 1000, number: page_num } - response = Client.query(nil, page: page, from_date: params[:from_date], until_date: params[:until_date], include_deleted: true) + page = { size: 1_000, number: page_num } + response = + Client.query( + nil, + page: page, + from_date: params[:from_date], + until_date: params[:until_date], + include_deleted: true, + ) clients = clients + response.results.to_a page_num += 1 end @@ -235,7 +354,13 @@ def repositories logger.warn "Exporting #{clients.length} repositories." # Get doi counts via DOIs query and combine next to clients. - response = DataciteDoi.query(nil, state: "registered,findable", page: { size: 0, number: 1 }, totals_agg: "client_export") + response = + DataciteDoi.query( + nil, + state: "registered,findable", + page: { size: 0, number: 1 }, + totals_agg: "client_export", + ) client_totals = {} totals_buckets = response.aggregations.clients_totals.buckets @@ -270,47 +395,73 @@ def repositories clients.each do |client| # Limit for salesforce default of max 80 chars - name = +client.name.truncate(80) + name = + +client.name.truncate(80) # Clean the name to remove quotes, which can break csv parsers - name.gsub! /["']/, "" + name.gsub!(/["']/, "") row = { accountName: name, fabricaAccountId: client.symbol, - parentFabricaAccountId: client.provider.present? ? client.provider.symbol : nil, + parentFabricaAccountId: + client.provider.present? ? 
client.provider.symbol : nil, isActive: client.deleted_at.blank?, accountDescription: client.description, accountWebsite: client.url, generalContactEmail: client.system_email, - serviceContactEmail: client.service_contact.present? ? client.service_contact.email : nil, - serviceContactGivenName: client.service_contact.present? ? client.service_contact.given_name : nil, - serviceContactFamilyName: client.service_contact.present? ? client.service_contact.family_name : nil, + serviceContactEmail: + client.service_contact.present? ? client.service_contact.email : nil, + serviceContactGivenName: + if client.service_contact.present? + client.service_contact.given_name + end, + serviceContactFamilyName: + if client.service_contact.present? + client.service_contact.family_name + end, created: export_date(client.created), modified: export_date(client.updated), - deleted: client.deleted_at.present? ? export_date(client.deleted_at) : nil, - doisCountCurrentYear: client_totals[client.uid] ? client_totals[client.uid]["this_year"] : 0, - doisCountPreviousYear: client_totals[client.uid] ? client_totals[client.uid]["last_year"] : 0, - doisCountTotal: client_totals[client.uid] ? client_totals[client.uid]["count"] : 0, + deleted: + client.deleted_at.present? ? export_date(client.deleted_at) : nil, + doisCountCurrentYear: + if client_totals[client.uid] + client_totals[client.uid]["this_year"] + else + 0 + end, + doisCountPreviousYear: + if client_totals[client.uid] + client_totals[client.uid]["last_year"] + else + 0 + end, + doisCountTotal: + client_totals[client.uid] ? 
client_totals[client.uid]["count"] : 0, }.values csv += CSV.generate_line row end - filename = if params[:until_date] - "repositories-#{params[:until_date]}.csv" - else - "repositories-#{Date.today}.csv" - end + filename = + if params[:until_date] + "repositories-#{params[:until_date]}.csv" + else + "repositories-#{Date.today}.csv" + end send_data csv, filename: filename - rescue StandardError, Elasticsearch::Transport::Transport::Errors::BadRequest => e + rescue StandardError, + Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - render json: { "errors" => { "title" => e.message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => e.message } }.to_json, + status: :bad_request end def export_date(date) - DateTime.strptime(date, "%Y-%m-%dT%H:%M:%S").strftime("%d/%m/%YT%H:%M:%S.%3NUTC%:z") + DateTime.strptime(date, "%Y-%m-%dT%H:%M:%S").strftime( + "%d/%m/%YT%H:%M:%S.%3NUTC%:z", + ) end def export_member_type(member_type) diff --git a/app/controllers/graphql_controller.rb b/app/controllers/graphql_controller.rb index 343968375..4182805cc 100644 --- a/app/controllers/graphql_controller.rb +++ b/app/controllers/graphql_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class GraphqlController < ApplicationController before_action :authenticate_user! @@ -9,7 +11,11 @@ def execute tracing_enabled: ApolloFederation::Tracing.should_add_traces(headers), current_user: current_user, } - result = LupoSchema.execute(query, variables: variables, context: context, operation_name: operation_name) + result = + LupoSchema.execute( + query, + variables: variables, context: context, operation_name: operation_name, + ) render json: ApolloFederation::Tracing.attach_trace_to_result(result) rescue StandardError => e raise e unless Rails.env.development? 
@@ -18,29 +24,27 @@ def execute end private - - # Handle form data, JSON body, or a blank value - def ensure_hash(ambiguous_param) - case ambiguous_param - when String - if ambiguous_param.present? - ensure_hash(JSON.parse(ambiguous_param)) - else + # Handle form data, JSON body, or a blank value + def ensure_hash(ambiguous_param) + case ambiguous_param + when String + ambiguous_param.present? ? ensure_hash(JSON.parse(ambiguous_param)) : {} + when Hash, ActionController::Parameters + ambiguous_param + when nil {} + else + raise ArgumentError, "Unexpected parameter: #{ambiguous_param}" end - when Hash, ActionController::Parameters - ambiguous_param - when nil - {} - else - raise ArgumentError, "Unexpected parameter: #{ambiguous_param}" end - end - def handle_error_in_development(e) - Rails.logger.error e.message - Rails.logger.error e.backtrace.join("\n") + def handle_error_in_development(e) + Rails.logger.error e.message + Rails.logger.error e.backtrace.join("\n") - render json: { error: { message: e.message, backtrace: e.backtrace }, data: {} }, status: :internal_server_error - end + render json: { + error: { message: e.message, backtrace: e.backtrace }, data: {} + }, + status: :internal_server_error + end end diff --git a/app/controllers/heartbeat_controller.rb b/app/controllers/heartbeat_controller.rb index 03f31ff9e..2cfe9e5f9 100644 --- a/app/controllers/heartbeat_controller.rb +++ b/app/controllers/heartbeat_controller.rb @@ -1,6 +1,10 @@ +# frozen_string_literal: true + class HeartbeatController < ApplicationController def index heartbeat = Heartbeat.new - render plain: heartbeat.string, status: heartbeat.status, content_type: "text/plain" + render plain: heartbeat.string, + status: heartbeat.status, + content_type: "text/plain" end end diff --git a/app/controllers/index_controller.rb b/app/controllers/index_controller.rb index b1ceef24e..8179aab6c 100644 --- a/app/controllers/index_controller.rb +++ b/app/controllers/index_controller.rb @@ -1,3 +1,5 @@ +# 
frozen_string_literal: true + class IndexController < ApplicationController include ActionController::MimeResponds @@ -16,15 +18,40 @@ def show end format.citation do # extract optional style and locale from header - headers = request.headers["HTTP_ACCEPT"].to_s.gsub(/\s+/, "").split(";", 3).reduce({}) do |sum, item| - sum[:style] = item.split("=").last if item.start_with?("style") - sum[:locale] = item.split("=").last if item.start_with?("locale") - sum - end - render citation: doi, style: params[:style] || headers[:style] || "apa", locale: params[:locale] || headers[:locale] || "en-US" + headers = + request.headers["HTTP_ACCEPT"].to_s.gsub(/\s+/, "").split(";", 3). + reduce({}) do |sum, item| + sum[:style] = item.split("=").last if item.start_with?("style") + sum[:locale] = item.split("=").last if item.start_with?("locale") + sum + end + render citation: doi, + style: params[:style] || headers[:style] || "apa", + locale: params[:locale] || headers[:locale] || "en-US" end - format.any(:bibtex, :citeproc, :codemeta, :crosscite, :datacite, :datacite_json, :jats, :ris, :schema_org) { render request.format.to_sym => doi } - header = %w(doi url registered state resourceTypeGeneral resourceType title author publisher publicationYear) + format.any( + :bibtex, + :citeproc, + :codemeta, + :crosscite, + :datacite, + :datacite_json, + :jats, + :ris, + :schema_org, + ) { render request.format.to_sym => doi } + header = %w[ + doi + url + registered + state + resourceTypeGeneral + resourceType + title + author + publisher + publicationYear + ] format.csv { render request.format.to_sym => doi, header: header } end rescue ActionController::UnknownFormat, ActionController::RoutingError @@ -38,6 +65,9 @@ def routing_error def method_not_allowed response.set_header("Allow", "POST") - render json: { "message": "This endpoint only supports POST requests." 
}.to_json, status: :method_not_allowed + render json: { + "message": "This endpoint only supports POST requests.", + }.to_json, + status: :method_not_allowed end end diff --git a/app/controllers/media_controller.rb b/app/controllers/media_controller.rb index 7dfedc584..03f218189 100644 --- a/app/controllers/media_controller.rb +++ b/app/controllers/media_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class MediaController < ApplicationController before_action :set_doi before_action :set_media, only: %i[show update destroy] @@ -11,34 +13,45 @@ def index page = page_from_params(params) total_pages = (total.to_f / page[:size]).ceil - order = case params[:sort] - when "name" then "dataset.doi" - when "-name" then "dataset.doi DESC" - when "created" then "media.created" - else "media.created DESC" - end + order = + case params[:sort] + when "name" + "dataset.doi" + when "-name" + "dataset.doi DESC" + when "created" + "media.created" + else + "media.created DESC" + end @media = collection.order(order).page(page[:number]).per(page[:size]) options = {} options[:meta] = { - total: total, - "totalPages" => total_pages, - page: page[:number].to_i, + total: total, "totalPages" => total_pages, page: page[:number].to_i }.compact options[:links] = { self: request.original_url, - next: @media.blank? ? nil : request.base_url + "/media?" + { - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if @media.blank? + nil + else + request.base_url + "/media?" + + { + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. 
+ to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: MediaSerializer.new(@media, options).serialized_json, status: :ok + render json: MediaSerializer.new(@media, options).serialized_json, + status: :ok end def show @@ -46,7 +59,8 @@ def show options[:include] = @include options[:is_collection] = false - render json: MediaSerializer.new(@media, options).serialized_json, status: :ok + render json: MediaSerializer.new(@media, options).serialized_json, + status: :ok end def create @@ -59,10 +73,12 @@ def create options[:include] = @include options[:is_collection] = false - render json: MediaSerializer.new(@media, options).serialized_json, status: :created + render json: MediaSerializer.new(@media, options).serialized_json, + status: :created else Rails.logger.error @media.errors.inspect - render json: serialize_errors(@media.errors), status: :unprocessable_entity + render json: serialize_errors(@media.errors), + status: :unprocessable_entity end end @@ -74,10 +90,12 @@ def update options[:include] = @include options[:is_collection] = false - render json: MediaSerializer.new(@media, options).serialized_json, status: :ok + render json: MediaSerializer.new(@media, options).serialized_json, + status: :ok else Rails.logger.error @media.errors.inspect - render json: serialize_errors(@media.errors), status: :unprocessable_entity + render json: serialize_errors(@media.errors), + status: :unprocessable_entity end end @@ -88,42 +106,45 @@ def destroy head :no_content else Rails.logger.error @media.errors.inspect - render json: serialize_errors(@media.errors), status: :unprocessable_entity + render json: serialize_errors(@media.errors), + status: :unprocessable_entity end end protected + def set_doi + @doi = DataciteDoi.where(doi: params[:datacite_doi_id]).first + fail ActiveRecord::RecordNotFound if @doi.blank? 
+ end - def set_doi - @doi = DataciteDoi.where(doi: params[:datacite_doi_id]).first - fail ActiveRecord::RecordNotFound if @doi.blank? - end - - def set_media - id = Base32::URL.decode(URI.decode(params[:id])) - fail ActiveRecord::RecordNotFound if id.blank? + def set_media + id = Base32::URL.decode(URI.decode(params[:id])) + fail ActiveRecord::RecordNotFound if id.blank? - @media = Media.where(id: id.to_i).first - fail ActiveRecord::RecordNotFound if @media.blank? - end + @media = Media.where(id: id.to_i).first + fail ActiveRecord::RecordNotFound if @media.blank? + end - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & [:doi] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[doi] + else + @include = [] + end end - end private - - def safe_params - fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank? - - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: ["mediaType", :url], - keys: { "mediaType" => :media_type } - ) - end + def safe_params + if params[:data].blank? 
+ fail JSON::ParserError, + "You need to provide a payload following the JSONAPI spec" + end + + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: ["mediaType", :url], keys: { "mediaType" => :media_type }, + ) + end end diff --git a/app/controllers/members_controller.rb b/app/controllers/members_controller.rb index 4d46b974f..6ec10cc3a 100644 --- a/app/controllers/members_controller.rb +++ b/app/controllers/members_controller.rb @@ -1,40 +1,65 @@ +# frozen_string_literal: true + class MembersController < ApplicationController - before_action :set_provider, only: [:show] + before_action :set_provider, only: %i[show] def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "name.raw" => { order: "asc" } } - when "-name" then { "name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "name.raw" => { order: "asc" } } + when "-name" + { "name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "name.raw" => { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Provider.find_by(id: params[:id]) + response = if params[:id].present? + Provider.find_by(id: params[:id]) elsif params[:ids].present? 
- response = Provider.find_by_id(params[:ids], page: page, sort: sort) + Provider.find_by_id(params[:ids], page: page, sort: sort) else - response = Provider.query(params[:query], - year: params[:year], - region: params[:region], - organization_type: params[:organization_type], - focus_area: params[:focus_area], - fields: params[:fields], - page: page, - sort: sort) + Provider.query( + params[:query], + year: params[:year], + region: params[:region], + organization_type: params[:organization_type], + focus_area: params[:focus_area], + fields: params[:fields], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_year(response.response.aggregations.years.buckets) : nil - regions = total > 0 ? facet_by_region(response.response.aggregations.regions.buckets) : nil - organization_types = total > 0 ? facet_by_key(response.response.aggregations.organization_types.buckets) : nil - focus_areas = total > 0 ? facet_by_key(response.response.aggregations.focus_areas.buckets) : nil + years = + if total > 0 + facet_by_year(response.response.aggregations.years.buckets) + end + regions = + if total > 0 + facet_by_region(response.response.aggregations.regions.buckets) + end + organization_types = + if total > 0 + facet_by_key( + response.response.aggregations.organization_types.buckets, + ) + end + focus_areas = + if total > 0 + facet_by_key(response.response.aggregations.focus_areas.buckets) + end @members = response.results @@ -51,29 +76,44 @@ def index options[:links] = { self: request.original_url, - next: @members.blank? ? nil : request.base_url + "/members?" 
+ { - query: params[:query], - year: params[:year], - region: params[:region], - "organization-type" => params[:organization_type], - "focus-area" => params[:focus_area], - fields: params[:fields], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: sort, - }.compact.to_query, + next: + if @members.blank? + nil + else + request.base_url + "/members?" + + { + query: params[:query], + year: params[:year], + region: params[:region], + "organization-type" => params[:organization_type], + "focus-area" => params[:focus_area], + fields: params[:fields], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: sort, + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true options[:links] = nil - render json: MemberSerializer.new(@members, options).serialized_json, status: :ok + render json: MemberSerializer.new(@members, options).serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -82,13 +122,19 @@ def show options[:include] = @include options[:is_collection] = false - render json: MemberSerializer.new(@provider, options).serialized_json, status: :ok + render json: MemberSerializer.new(@provider, options).serialized_json, + status: :ok end protected - - def set_provider - @provider = Provider.unscoped.where("allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')").where(deleted_at: nil).where(symbol: 
params[:id]).first - fail ActiveRecord::RecordNotFound if @provider.blank? - end + def set_provider + @provider = + Provider.unscoped.where( + "allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')", + ). + where(deleted_at: nil). + where(symbol: params[:id]). + first + fail ActiveRecord::RecordNotFound if @provider.blank? + end end diff --git a/app/controllers/metadata_controller.rb b/app/controllers/metadata_controller.rb index f98d31174..de487ac47 100644 --- a/app/controllers/metadata_controller.rb +++ b/app/controllers/metadata_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class MetadataController < ApplicationController before_action :set_doi before_action :set_metadata, only: %i[show destroy] @@ -9,39 +11,51 @@ def index fail ActiveRecord::RecordNotFound if @doi.blank? collection = @doi.metadata - total = @doi.cached_metadata_count.reduce(0) { |sum, d| sum + d[:count].to_i } + total = + @doi.cached_metadata_count.reduce(0) { |sum, d| sum + d[:count].to_i } page = page_from_params(params) total_pages = (total.to_f / page[:size]).ceil - order = case params[:sort] - when "name" then "dataset.doi" - when "-name" then "dataset.doi DESC" - when "created" then "metadata.created" - else "metadata.created DESC" - end + order = + case params[:sort] + when "name" + "dataset.doi" + when "-name" + "dataset.doi DESC" + when "created" + "metadata.created" + else + "metadata.created DESC" + end @metadata = collection.order(order).page(page[:number]).per(page[:size]) options = {} options[:meta] = { - total: total, - "totalPages" => total_pages, - page: page[:number].to_i, + total: total, "totalPages" => total_pages, page: page[:number].to_i }.compact options[:links] = { self: request.original_url, - next: @metadata.blank? ? nil : request.base_url + "/media?" 
+ { - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if @metadata.blank? + nil + else + request.base_url + "/media?" + + { + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: MetadataSerializer.new(@metadata, options).serialized_json, status: :ok + render json: MetadataSerializer.new(@metadata, options).serialized_json, + status: :ok end def show @@ -49,7 +63,8 @@ def show options[:include] = @include options[:is_collection] = false - render json: MetadataSerializer.new(@metadata, options).serialized_json, status: :ok + render json: MetadataSerializer.new(@metadata, options).serialized_json, + status: :ok end def create @@ -64,10 +79,12 @@ def create options[:include] = @include options[:is_collection] = false - render json: MetadataSerializer.new(@metadata, options).serialized_json, status: :created + render json: MetadataSerializer.new(@metadata, options).serialized_json, + status: :created else Rails.logger.error @metadata.errors.inspect - render json: serialize_errors(@metadata.errors), status: :unprocessable_entity + render json: serialize_errors(@metadata.errors), + status: :unprocessable_entity end end @@ -79,45 +96,52 @@ def destroy head :no_content else Rails.logger.error @metadata.errors.inspect - render json: serialize_errors(@metadata.errors), status: :unprocessable_entity + render json: serialize_errors(@metadata.errors), + status: :unprocessable_entity end else response.headers["Allow"] = "HEAD, GET, POST, PATCH, PUT, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end end protected + def set_doi + @doi = DataciteDoi.where(doi: 
params[:datacite_doi_id]).first + fail ActiveRecord::RecordNotFound if @doi.blank? + end - def set_doi - @doi = DataciteDoi.where(doi: params[:datacite_doi_id]).first - fail ActiveRecord::RecordNotFound if @doi.blank? - end - - def set_metadata - id = Base32::URL.decode(URI.decode(params[:id])) - fail ActiveRecord::RecordNotFound if id.blank? + def set_metadata + id = Base32::URL.decode(URI.decode(params[:id])) + fail ActiveRecord::RecordNotFound if id.blank? - @metadata = Metadata.where(id: id.to_i).first - fail ActiveRecord::RecordNotFound if @metadata.blank? - end + @metadata = Metadata.where(id: id.to_i).first + fail ActiveRecord::RecordNotFound if @metadata.blank? + end - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & [:doi] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[doi] + else + @include = [] + end end - end private + def safe_params + if params[:data].blank? + fail JSON::ParserError, + "You need to provide a payload following the JSONAPI spec" + end - def safe_params - fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank? 
- - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:xml] - ) - end + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: %i[xml], + ) + end end diff --git a/app/controllers/old_events_controller.rb b/app/controllers/old_events_controller.rb index 3e3c53626..ed4f8cf34 100644 --- a/app/controllers/old_events_controller.rb +++ b/app/controllers/old_events_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OldEventsController < ApplicationController include Identifiable @@ -5,16 +7,18 @@ class OldEventsController < ApplicationController prepend_before_action :authenticate_user!, except: %i[index show] before_action :detect_crawler - before_action :load_event, only: [:show] + before_action :load_event, only: %i[show] before_action :set_include, only: %i[index show create update] - authorize_resource only: [:destroy] + authorize_resource only: %i[destroy] def create - @event = Event.where(subj_id: safe_params[:subj_id]). - where(obj_id: safe_params[:obj_id]). - where(source_id: safe_params[:source_id]). - where(relation_type_id: safe_params[:relation_type_id]). - first + @event = + Event.where(subj_id: safe_params[:subj_id]).where( + obj_id: safe_params[:obj_id], + ). + where(source_id: safe_params[:source_id]). + where(relation_type_id: safe_params[:relation_type_id]). + first exists = @event.present? # create event if it doesn't exist already @@ -26,9 +30,13 @@ def create options = {} options[:is_collection] = false - render json: OldEventSerializer.new(@event, options).serialized_json, status: exists ? :ok : :created + render json: OldEventSerializer.new(@event, options).serialized_json, + status: exists ? 
:ok : :created else - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end @@ -46,9 +54,13 @@ def update options = {} options[:is_collection] = false - render json: OldEventSerializer.new(@event, options).serialized_json, status: exists ? :ok : :created + render json: OldEventSerializer.new(@event, options).serialized_json, + status: exists ? :ok : :created else - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end @@ -57,49 +69,64 @@ def show options = {} options[:is_collection] = false - render json: OldEventSerializer.new(@event, options).serialized_json, status: :ok + render json: OldEventSerializer.new(@event, options).serialized_json, + status: :ok end def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "obj_id" then { "obj_id" => { order: "asc" } } - when "-obj_id" then { "obj_id" => { order: "desc" } } - when "total" then { "total" => { order: "asc" } } - when "-total" then { "total" => { order: "desc" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - when "updated" then { updated_at: { order: "asc" } } - when "relation_type_id" then { relation_type_id: { order: "asc" } } - when "-updated" then { updated_at: { order: "desc" } } - else { updated_at: { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "obj_id" + { "obj_id" => { order: "asc" } } + when "-obj_id" + { "obj_id" => { order: "desc" } } + when "total" + { "total" => { order: "asc" } } + when "-total" + { "total" => { 
order: "desc" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + when "updated" + { updated_at: { order: "asc" } } + when "relation_type_id" + { relation_type_id: { order: "asc" } } + when "-updated" + { updated_at: { order: "desc" } } + else + { updated_at: { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Event.find_by(id: params[:id]) + response = if params[:id].present? + Event.find_by(id: params[:id]) elsif params[:ids].present? - response = Event.find_by_ids(params[:ids], page: page, sort: sort) + Event.find_by_ids(params[:ids], page: page, sort: sort) else - response = Event.query(params[:query], - subj_id: params[:subj_id], - obj_id: params[:obj_id], - doi: params[:doi], - orcid: params[:orcid], - prefix: params[:prefix], - subtype: params[:subtype], - citation_type: params[:citation_type], - source_id: params[:source_id], - registrant_id: params[:registrant_id], - relation_type_id: params[:relation_type_id], - issn: params[:issn], - occurred_at: params[:occurred_at], - publication_year: params[:publication_year], - year_month: params[:year_month], - scroll_id: params[:scroll_id], - page: page, - sort: sort) + Event.query( + params[:query], + subj_id: params[:subj_id], + obj_id: params[:obj_id], + doi: params[:doi], + orcid: params[:orcid], + prefix: params[:prefix], + subtype: params[:subtype], + citation_type: params[:citation_type], + source_id: params[:source_id], + registrant_id: params[:registrant_id], + relation_type_id: params[:relation_type_id], + issn: params[:issn], + occurred_at: params[:occurred_at], + publication_year: params[:publication_year], + year_month: params[:year_month], + scroll_id: params[:scroll_id], + page: page, + sort: sort, + ) end if page[:scroll].present? @@ -107,33 +134,58 @@ def index total = response.total else total = response.results.total - total_for_pages = page[:cursor].nil? ? 
total.to_f : [total.to_f, 10000].min + total_for_pages = + page[:cursor].nil? ? total.to_f : [total.to_f, 10_000].min total_pages = page[:size] > 0 ? (total_for_pages / page[:size]).ceil : 0 end if page[:scroll].present? options = {} options[:meta] = { - total: total, - "scroll-id" => response.scroll_id, + total: total, "scroll-id" => response.scroll_id }.compact options[:links] = { self: request.original_url, - next: results.size < page[:size] || page[:size] == 0 ? nil : request.base_url + "/events?" + { - "scroll-id" => response.scroll_id, - "page[scroll]" => page[:scroll], - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] || page[:size] == 0 + nil + else + request.base_url + "/events?" + + { + "scroll-id" => response.scroll_id, + "page[scroll]" => page[:scroll], + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:is_collection] = true - render json: OldEventSerializer.new(results, options).serialized_json, status: :ok + render json: OldEventSerializer.new(results, options).serialized_json, + status: :ok else - sources = total > 0 ? facet_by_source(response.aggregations.sources.buckets) : nil - prefixes = total > 0 ? facet_by_source(response.aggregations.prefixes.buckets) : nil - citation_types = total > 0 ? facet_by_citation_type_v1(response.aggregations.citation_types.buckets) : nil - relation_types = total > 0 ? facet_by_relation_type_v1(response.aggregations.relation_types.buckets) : nil - registrants = total > 0 ? facet_by_registrants(response.aggregations.registrants.buckets) : nil + sources = + total > 0 ? 
facet_by_source(response.aggregations.sources.buckets) : nil + prefixes = + if total > 0 + facet_by_source(response.aggregations.prefixes.buckets) + end + citation_types = + if total > 0 + facet_by_citation_type_v1( + response.aggregations.citation_types.buckets, + ) + end + relation_types = + if total > 0 + facet_by_relation_type_v1( + response.aggregations.relation_types.buckets, + ) + end + registrants = + if total > 0 + facet_by_registrants(response.aggregations.registrants.buckets) + end results = response.results @@ -141,7 +193,8 @@ def index options[:meta] = { total: total, "total-pages" => total_pages, - page: page[:cursor].nil? && page[:number].present? ? page[:number] : nil, + page: + page[:cursor].nil? && page[:number].present? ? page[:number] : nil, sources: sources, prefixes: prefixes, "citation-types" => citation_types, @@ -151,29 +204,40 @@ def index options[:links] = { self: request.original_url, - next: results.size < page[:size] ? nil : request.base_url + "/events?" + { - "query" => params[:query], - "subj-id" => params[:subj_id], - "obj-id" => params[:obj_id], - "doi" => params[:doi], - "orcid" => params[:orcid], - "prefix" => params[:prefix], - "subtype" => params[:subtype], - "citation_type" => params[:citation_type], - "source-id" => params[:source_id], - "relation-type-id" => params[:relation_type_id], - "issn" => params[:issn], - "registrant-id" => params[:registrant_id], - "publication-year" => params[:publication_year], - "year-month" => params[:year_month], - "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, - "page[number]" => page[:cursor].nil? && page[:number].present? ? page[:number] + 1 : nil, - "page[size]" => page[:size], - }.compact.to_query, + next: + if results.size < page[:size] + nil + else + request.base_url + "/events?" 
+ + { + "query" => params[:query], + "subj-id" => params[:subj_id], + "obj-id" => params[:obj_id], + "doi" => params[:doi], + "orcid" => params[:orcid], + "prefix" => params[:prefix], + "subtype" => params[:subtype], + "citation_type" => params[:citation_type], + "source-id" => params[:source_id], + "relation-type-id" => params[:relation_type_id], + "issn" => params[:issn], + "registrant-id" => params[:registrant_id], + "publication-year" => params[:publication_year], + "year-month" => params[:year_month], + "page[cursor]" => page[:cursor] ? make_cursor(results) : nil, + "page[number]" => + if page[:cursor].nil? && page[:number].present? + page[:number] + 1 + end, + "page[size]" => page[:size], + }.compact. + to_query + end, }.compact options[:is_collection] = true - render json: OldEventSerializer.new(results, options).serialized_json, status: :ok + render json: OldEventSerializer.new(results, options).serialized_json, + status: :ok end end @@ -182,35 +246,70 @@ def destroy if @event.destroy head :no_content else - errors = @event.errors.full_messages.map { |message| { status: 422, title: message } } + errors = + @event.errors.full_messages.map do |message| + { status: 422, title: message } + end render json: { errors: errors }, status: :unprocessable_entity end end protected + def load_event + response = Event.find_by(id: params[:id]) + @event = response.results.first + fail ActiveRecord::RecordNotFound if @event.blank? + end - def load_event - response = Event.find_by(id: params[:id]) - @event = response.results.first - fail ActiveRecord::RecordNotFound if @event.blank? - end - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[subj obj] - else - @include = [] + def set_include + if params[:include].present? 
+ @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[subj obj] + else + @include = [] + end end - end private - - def safe_params - nested_params = [:id, :name, { author: ["given-name", "family-name", :name] }, "alternate-name", :publisher, "provider-id", :periodical, "volume-number", "issue-number", :pagination, :issn, "date-published", "registrant-id", :doi, :url, :type] - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:id, "message-action", "source-token", :callback, "subj-id", "obj-id", "relation-type-id", "source-id", :total, :license, "occurred-at", :subj, :obj, subj: nested_params, obj: nested_params], - keys: { id: :uuid } - ) - end + def safe_params + nested_params = [ + :id, + :name, + { author: ["given-name", "family-name", :name] }, + "alternate-name", + :publisher, + "provider-id", + :periodical, + "volume-number", + "issue-number", + :pagination, + :issn, + "date-published", + "registrant-id", + :doi, + :url, + :type, + ] + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [ + :id, + "message-action", + "source-token", + :callback, + "subj-id", + "obj-id", + "relation-type-id", + "source-id", + :total, + :license, + "occurred-at", + :subj, + :obj, + { subj: nested_params, obj: nested_params }, + ], + keys: { id: :uuid }, + ) + end end diff --git a/app/controllers/organizations_controller.rb b/app/controllers/organizations_controller.rb index cc2d02d7a..8e64ae471 100644 --- a/app/controllers/organizations_controller.rb +++ b/app/controllers/organizations_controller.rb @@ -1,46 +1,74 @@ +# frozen_string_literal: true + class OrganizationsController < ApplicationController include ActionController::MimeResponds include Countable - before_action :set_provider, only: [:show] + before_action :set_provider, only: %i[show] def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "name.raw" => { 
order: "asc" } } - when "-name" then { "name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "name.raw" => { order: "asc" } } + when "-name" + { "name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "name.raw" => { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Provider.find_by(id: params[:id]) + response = if params[:id].present? + Provider.find_by(id: params[:id]) elsif params[:ids].present? - response = Provider.find_by_id(params[:ids], page: page, sort: sort) + Provider.find_by_id(params[:ids], page: page, sort: sort) else - response = Provider.query(params[:query], - year: params[:year], - from_date: params[:from_date], - until_date: params[:until_date], - region: params[:region], - consortium_id: params[:provider_id], - organization_type: params[:organization_type], - focus_area: params[:focus_area], - page: page, - sort: sort) + Provider.query( + params[:query], + year: params[:year], + from_date: params[:from_date], + until_date: params[:until_date], + region: params[:region], + consortium_id: params[:provider_id], + organization_type: params[:organization_type], + focus_area: params[:focus_area], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_year(response.response.aggregations.years.buckets) : nil - regions = total > 0 ? facet_by_region(response.response.aggregations.regions.buckets) : nil - member_types = total > 0 ? facet_by_key(response.response.aggregations.member_types.buckets) : nil - organization_types = total > 0 ? 
facet_by_key(response.response.aggregations.organization_types.buckets) : nil - focus_areas = total > 0 ? facet_by_key(response.response.aggregations.focus_areas.buckets) : nil + years = + if total > 0 + facet_by_year(response.response.aggregations.years.buckets) + end + regions = + if total > 0 + facet_by_region(response.response.aggregations.regions.buckets) + end + member_types = + if total > 0 + facet_by_key(response.response.aggregations.member_types.buckets) + end + organization_types = + if total > 0 + facet_by_key( + response.response.aggregations.organization_types.buckets, + ) + end + focus_areas = + if total > 0 + facet_by_key(response.response.aggregations.focus_areas.buckets) + end @providers = response.results respond_to do |format| @@ -59,32 +87,46 @@ def index options[:links] = { self: request.original_url, - next: @providers.blank? ? nil : request.base_url + "/providers?" + { - query: params[:query], - year: params[:year], - region: params[:region], - "member_type" => params[:member_type], - "organization_type" => params[:organization_type], - "focus-area" => params[:focus_area], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: sort, - }.compact.to_query, + next: + if @providers.blank? + nil + else + request.base_url + "/providers?" + + { + query: params[:query], + year: params[:year], + region: params[:region], + "member_type" => params[:member_type], + "organization_type" => params[:organization_type], + "focus-area" => params[:focus_area], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: sort, + }.compact. 
+ to_query + end, }.compact options[:include] = @include options[:is_collection] = true - options[:params] = { - current_ability: current_ability, - } + options[:params] = { current_ability: current_ability } fields = fields_from_params(params) if fields - render json: ProviderSerializer.new(@providers, options.merge(fields: fields)).serialized_json, status: :ok + render json: + ProviderSerializer.new( + @providers, + options.merge(fields: fields), + ). + serialized_json, + status: :ok else - render json: ProviderSerializer.new(@providers, options).serialized_json, status: :ok + render json: + ProviderSerializer.new(@providers, options). + serialized_json, + status: :ok end end - header = %w( + header = %w[ accountName fabricaAccountId parentFabricaAccountId @@ -135,36 +177,50 @@ def index created updated deletedAt - ) - format.csv { render request.format.to_sym => response.records.to_a, header: header } + ] + format.csv do + render request.format.to_sym => response.records.to_a, header: header + end end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end def show options = {} options[:meta] = { - repositories: client_count(provider_id: params[:id] == "admin" ? nil : params[:id]), + repositories: + client_count(provider_id: params[:id] == "admin" ? nil : params[:id]), dois: doi_count(provider_id: params[:id] == "admin" ? 
nil : params[:id]), }.compact options[:include] = @include options[:is_collection] = false - options[:params] = { - current_ability: current_ability, - } - render json: ProviderSerializer.new(@provider, options).serialized_json, status: :ok + options[:params] = { current_ability: current_ability } + render json: ProviderSerializer.new(@provider, options).serialized_json, + status: :ok end protected - - def set_provider - @provider = Provider.unscoped.where("allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_ADMIN', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')").where(deleted_at: nil).where(symbol: params[:id]).first - fail ActiveRecord::RecordNotFound if @provider.blank? - end + def set_provider + @provider = + Provider.unscoped.where( + "allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_ADMIN', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')", + ). + where(deleted_at: nil). + where(symbol: params[:id]). + first + fail ActiveRecord::RecordNotFound if @provider.blank? + end end diff --git a/app/controllers/prefixes_controller.rb b/app/controllers/prefixes_controller.rb index a77db20ef..3afab5c94 100644 --- a/app/controllers/prefixes_controller.rb +++ b/app/controllers/prefixes_controller.rb @@ -1,41 +1,67 @@ +# frozen_string_literal: true + class PrefixesController < ApplicationController before_action :set_prefix, only: %i[show update destroy] before_action :authenticate_user! 
before_action :set_include load_and_authorize_resource except: %i[index show totals] - around_action :skip_bullet, only: [:index], if: -> { defined?(Bullet) } + around_action :skip_bullet, only: %i[index], if: -> { defined?(Bullet) } def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "uid" => { order: "asc", unmapped_type: "keyword" } } - when "-name" then { "uid" => { order: "desc", unmapped_type: "keyword" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - else { "uid" => { order: "asc", unmapped_type: "keyword" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "uid" => { order: "asc", unmapped_type: "keyword" } } + when "-name" + { "uid" => { order: "desc", unmapped_type: "keyword" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + else + { "uid" => { order: "asc", unmapped_type: "keyword" } } + end page = page_from_params(params) - response = if params[:id].present? - Prefix.find_by(id: params[:id]) - else - Prefix.query(params[:query], - year: params[:year], - state: params[:state], - provider_id: params[:provider_id], - client_id: params[:client_id], - page: page, - sort: sort) - end + response = + if params[:id].present? + Prefix.find_by(id: params[:id]) + else + Prefix.query( + params[:query], + year: params[:year], + state: params[:state], + provider_id: params[:provider_id], + client_id: params[:client_id], + page: page, + sort: sort, + ) + end begin total = response.results.total total_pages = page[:size].positive? ? (total.to_f / page[:size]).ceil : 0 - years = total.positive? ? facet_by_year(response.response.aggregations.years.buckets) : nil - states = total.positive? ? facet_by_key(response.response.aggregations.states.buckets) : nil - providers = total.positive? ? 
facet_by_combined_key(response.response.aggregations.providers.buckets) : nil - clients = total.positive? ? facet_by_combined_key(response.response.aggregations.clients.buckets) : nil + years = + if total.positive? + facet_by_year(response.response.aggregations.years.buckets) + end + states = + if total.positive? + facet_by_key(response.response.aggregations.states.buckets) + end + providers = + if total.positive? + facet_by_combined_key( + response.response.aggregations.providers.buckets, + ) + end + clients = + if total.positive? + facet_by_combined_key(response.response.aggregations.clients.buckets) + end prefixes = response.results @@ -52,27 +78,42 @@ def index options[:links] = { self: request.original_url, - next: prefixes.blank? ? nil : request.base_url + "/prefixes?" + { - query: params[:query], - prefix: params[:prefix], - year: params[:year], - provider_id: params[:provider_id], - client_id: params[:client_id], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if prefixes.blank? + nil + else + request.base_url + "/prefixes?" + + { + query: params[:query], + prefix: params[:prefix], + year: params[:year], + provider_id: params[:provider_id], + client_id: params[:client_id], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. 
+ to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: PrefixSerializer.new(prefixes, options).serialized_json, status: :ok + render json: PrefixSerializer.new(prefixes, options).serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -81,7 +122,8 @@ def show options[:include] = @include options[:is_collection] = false - render json: PrefixSerializer.new(@prefix, options).serialized_json, status: :ok + render json: PrefixSerializer.new(@prefix, options).serialized_json, + status: :ok end def create @@ -93,16 +135,22 @@ def create options[:include] = @include options[:is_collection] = false - render json: PrefixSerializer.new(@prefix, options).serialized_json, status: :created, location: @prefix + render json: PrefixSerializer.new(@prefix, options).serialized_json, + status: :created, + location: @prefix else logger.error @prefix.errors.inspect - render json: serialize_errors(@prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@prefix.errors), + status: :unprocessable_entity end end def update response.headers["Allow"] = "HEAD, GET, POST, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end def destroy @@ -112,7 +160,8 @@ def destroy head :no_content else Rails.logger.error @prefix.errors.inspect - render json: serialize_errors(@prefix.errors), status: :unprocessable_entity 
+ render json: serialize_errors(@prefix.errors), + status: :unprocessable_entity end end @@ -120,37 +169,47 @@ def totals return [] if params[:client_id].blank? page = { size: 0, number: 1 } - response = Doi.query(nil, client_id: params[:client_id], state: "findable,registered", page: page, totals_agg: "prefix") - registrant = prefixes_totals(response.response.aggregations.prefixes_totals.buckets) + response = + Doi.query( + nil, + client_id: params[:client_id], + state: "findable,registered", + page: page, + totals_agg: "prefix", + ) + registrant = + prefixes_totals(response.response.aggregations.prefixes_totals.buckets) render json: registrant, status: :ok end protected - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[clients providers client_prefixes provider_prefixes] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = + @include & %i[clients providers client_prefixes provider_prefixes] + else + @include = [] + end end - end private + def set_prefix + @prefix = Prefix.where(uid: params[:id]).first + + # fallback to call handle server, i.e. for prefixes not from DataCite + unless @prefix.present? || Rails.env.test? + @prefix = Handle.where(id: params[:id]) + end + fail ActiveRecord::RecordNotFound if @prefix.blank? + end - def set_prefix - @prefix = Prefix.where(uid: params[:id]).first - - # fallback to call handle server, i.e. for prefixes not from DataCite - @prefix = Handle.where(id: params[:id]) unless @prefix.present? || Rails.env.test? - fail ActiveRecord::RecordNotFound if @prefix.blank? 
- end - - def safe_params - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: %i[id created_at], - keys: { id: :uid } - ) - end + def safe_params + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: %i[id created_at], keys: { id: :uid }, + ) + end end diff --git a/app/controllers/provider_prefixes_controller.rb b/app/controllers/provider_prefixes_controller.rb index 28edf24c3..b79e0536d 100644 --- a/app/controllers/provider_prefixes_controller.rb +++ b/app/controllers/provider_prefixes_controller.rb @@ -1,41 +1,61 @@ +# frozen_string_literal: true + class ProviderPrefixesController < ApplicationController prepend_before_action :authenticate_user! before_action :set_provider_prefix, only: %i[show update destroy] before_action :set_include authorize_resource except: %i[index show] - around_action :skip_bullet, only: [:index], if: -> { defined?(Bullet) } + around_action :skip_bullet, only: %i[index], if: -> { defined?(Bullet) } def index - sort = case params[:sort] - when "name" then { "prefix_id" => { order: "asc", unmapped_type: "keyword" } } - when "-name" then { "prefix_id" => { order: "desc", unmapped_type: "keyword" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - else { created_at: { order: "desc" } } - end + sort = + case params[:sort] + when "name" + { "prefix_id" => { order: "asc", unmapped_type: "keyword" } } + when "-name" + { "prefix_id" => { order: "desc", unmapped_type: "keyword" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + else + { created_at: { order: "desc" } } + end page = page_from_params(params) if params[:id].present? 
response = ProviderPrefix.find_by(id: params[:id]) else - response = ProviderPrefix.query(params[:query], - prefix_id: params[:prefix_id], - consortium_id: params[:consortium_id], - provider_id: params[:provider_id], - consortium_organization_id: params[:consortium_organization_id], - state: params[:state], - year: params[:year], - page: page, - sort: sort) + response = + ProviderPrefix.query( + params[:query], + prefix_id: params[:prefix_id], + consortium_id: params[:consortium_id], + provider_id: params[:provider_id], + consortium_organization_id: params[:consortium_organization_id], + state: params[:state], + year: params[:year], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size].positive? ? (total.to_f / page[:size]).ceil : 0 - years = total.positive? ? facet_by_year(response.aggregations.years.buckets) : nil - states = total.positive? ? facet_by_key(response.aggregations.states.buckets) : nil - providers = total.positive? ? facet_by_combined_key(response.aggregations.providers.buckets) : nil + years = + if total.positive? + facet_by_year(response.aggregations.years.buckets) + end + states = + if total.positive? + facet_by_key(response.aggregations.states.buckets) + end + providers = + if total.positive? + facet_by_combined_key(response.aggregations.providers.buckets) + end provider_prefixes = response.results @@ -51,25 +71,42 @@ def index options[:links] = { self: request.original_url, - next: provider_prefixes.blank? ? nil : request.base_url + "/provider_prefixes?" + { - query: params[:query], - prefix: params[:prefix], - year: params[:year], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if provider_prefixes.blank? + nil + else + request.base_url + "/provider_prefixes?" 
+ + { + query: params[:query], + prefix: params[:prefix], + year: params[:year], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: ProviderPrefixSerializer.new(provider_prefixes, options).serialized_json, status: :ok + render json: + ProviderPrefixSerializer.new(provider_prefixes, options). + serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -78,7 +115,10 @@ def show options[:include] = @include options[:is_collection] = false - render json: ProviderPrefixSerializer.new(@provider_prefix, options).serialized_json, status: :ok + render json: + ProviderPrefixSerializer.new(@provider_prefix, options). + serialized_json, + status: :ok end def create @@ -86,74 +126,108 @@ def create authorize! :create, @provider_prefix if @provider_prefix.save - if @provider_prefix.__elasticsearch__.index_document.dig("result") != "created" - logger.error "Error adding Provider Prefix #{@provider_prefix.uid} to Elasticsearch index." + if @provider_prefix.__elasticsearch__.index_document.dig("result") != + "created" + logger.error "Error adding Provider Prefix #{ + @provider_prefix.uid + } to Elasticsearch index." end - if @provider_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@provider_prefix.prefix.uid}." 
+ if @provider_prefix.prefix.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @provider_prefix.prefix.uid + }." end - if @provider_prefix.provider.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Provider #{@provider_prefix.provider.uid}." + if @provider_prefix.provider.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Provider #{ + @provider_prefix.provider.uid + }." end options = {} options[:include] = @include options[:is_collection] = false - render json: ProviderPrefixSerializer.new(@provider_prefix, options).serialized_json, status: :created + render json: + ProviderPrefixSerializer.new(@provider_prefix, options). + serialized_json, + status: :created else Rails.logger.error @provider_prefix.errors.inspect - render json: serialize_errors(@provider_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@provider_prefix.errors), + status: :unprocessable_entity end end def update response.headers["Allow"] = "HEAD, GET, POST, DELETE, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end def destroy message = "Provider Prefix #{@provider_prefix.uid} deleted." if @provider_prefix.destroy - if @provider_prefix.__elasticsearch__.delete_document.dig("result") != "deleted" - logger.error "Error deleting Provider Prefix #{@provider_prefix.uid} from Elasticsearch index." + if @provider_prefix.__elasticsearch__.delete_document.dig("result") != + "deleted" + logger.error "Error deleting Provider Prefix #{ + @provider_prefix.uid + } from Elasticsearch index." 
end - if @provider_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@provider_prefix.prefix.uid}." + if @provider_prefix.prefix.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @provider_prefix.prefix.uid + }." end - if @provider_prefix.provider.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Provider #{@provider_prefix.provider.uid}." + if @provider_prefix.provider.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Provider #{ + @provider_prefix.provider.uid + }." end logger.info message head :no_content else Rails.logger.error @provider_prefix.errors.inspect - render json: serialize_errors(@provider_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@provider_prefix.errors), + status: :unprocessable_entity end end protected - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[provider prefix clients client_prefixes] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[provider prefix clients client_prefixes] + else + @include = [] + end end - end private + def set_provider_prefix + @provider_prefix = ProviderPrefix.where(uid: params[:id]).first + fail ActiveRecord::RecordNotFound if @provider_prefix.blank? + end - def set_provider_prefix - @provider_prefix = ProviderPrefix.where(uid: params[:id]).first - fail ActiveRecord::RecordNotFound if @provider_prefix.blank? 
- end - - def safe_params - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: %i[id provider prefix] - ) - end + def safe_params + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: %i[id provider prefix], + ) + end end diff --git a/app/controllers/providers_controller.rb b/app/controllers/providers_controller.rb index 25adbb333..c906a6270 100644 --- a/app/controllers/providers_controller.rb +++ b/app/controllers/providers_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ProvidersController < ApplicationController include ActionController::MimeResponds include Countable @@ -8,46 +10,77 @@ class ProvidersController < ApplicationController load_and_authorize_resource only: %i[update destroy] def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "display_name.raw" => { order: "asc" } } - when "-name" then { "display_name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "display_name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "display_name.raw" => { order: "asc" } } + when "-name" + { "display_name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "display_name.raw" => { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Provider.find_by(id: params[:id]) + response = if params[:id].present? + Provider.find_by(id: params[:id]) elsif params[:ids].present? 
- response = Provider.find_by_id(params[:ids], page: page, sort: sort) + Provider.find_by_id(params[:ids], page: page, sort: sort) else - response = Provider.query(params[:query], - year: params[:year], - from_date: params[:from_date], - until_date: params[:until_date], - region: params[:region], - consortium_id: params[:consortium_id], - member_type: params[:member_type], - organization_type: params[:organization_type], - focus_area: params[:focus_area], - non_profit_status: params[:non_profit_status], - page: page, - sort: sort) + Provider.query( + params[:query], + year: params[:year], + from_date: params[:from_date], + until_date: params[:until_date], + region: params[:region], + consortium_id: params[:consortium_id], + member_type: params[:member_type], + organization_type: params[:organization_type], + focus_area: params[:focus_area], + non_profit_status: params[:non_profit_status], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_key_as_string(response.response.aggregations.years.buckets) : nil - regions = total > 0 ? facet_by_region(response.response.aggregations.regions.buckets) : nil - member_types = total > 0 ? facet_by_key(response.response.aggregations.member_types.buckets) : nil - organization_types = total > 0 ? facet_by_key(response.response.aggregations.organization_types.buckets) : nil - focus_areas = total > 0 ? facet_by_key(response.response.aggregations.focus_areas.buckets) : nil - non_profit_statuses = total > 0 ? 
facet_by_key(response.response.aggregations.non_profit_statuses.buckets) : nil + years = + if total > 0 + facet_by_key_as_string(response.response.aggregations.years.buckets) + end + regions = + if total > 0 + facet_by_region(response.response.aggregations.regions.buckets) + end + member_types = + if total > 0 + facet_by_key(response.response.aggregations.member_types.buckets) + end + organization_types = + if total > 0 + facet_by_key( + response.response.aggregations.organization_types.buckets, + ) + end + focus_areas = + if total > 0 + facet_by_key(response.response.aggregations.focus_areas.buckets) + end + non_profit_statuses = + if total > 0 + facet_by_key( + response.response.aggregations.non_profit_statuses.buckets, + ) + end @providers = response.results respond_to do |format| @@ -67,18 +100,25 @@ def index options[:links] = { self: request.original_url, - next: @providers.blank? ? nil : request.base_url + "/providers?" + { - query: params[:query], - year: params[:year], - region: params[:region], - "member_type" => params[:member_type], - "organization_type" => params[:organization_type], - "focus-area" => params[:focus_area], - "non-profit-status" => params[:non_profit_status], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: sort, - }.compact.to_query, + next: + if @providers.blank? + nil + else + request.base_url + "/providers?" + + { + query: params[:query], + year: params[:year], + region: params[:region], + "member_type" => params[:member_type], + "organization_type" => params[:organization_type], + "focus-area" => params[:focus_area], + "non-profit-status" => params[:non_profit_status], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: sort, + }.compact. 
+ to_query + end, }.compact options[:include] = @include options[:is_collection] = true @@ -86,12 +126,21 @@ def index fields = fields_from_params(params) if fields - render json: ProviderSerializer.new(@providers, options.merge(fields: fields)).serialized_json, status: :ok + render json: + ProviderSerializer.new( + @providers, + options.merge(fields: fields), + ). + serialized_json, + status: :ok else - render json: ProviderSerializer.new(@providers, options).serialized_json, status: :ok + render json: + ProviderSerializer.new(@providers, options). + serialized_json, + status: :ok end end - header = %w( + header = %w[ accountName fabricaAccountId year @@ -141,15 +190,24 @@ def index created updated deleted_at - ) - format.csv { render request.format.to_sym => response.records.to_a, header: header } + ] + format.csv do + render request.format.to_sym => response.records.to_a, header: header + end end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -157,9 +215,12 @@ def show options = {} if @provider.member_type == "consortium" options[:meta] = { - "consortiumOrganizationCount" => Array.wrap(@provider.consortium_organization_ids).length, + "consortiumOrganizationCount" => + Array.wrap(@provider.consortium_organization_ids).length, } - elsif %w(direct_member consortium_organization).include?(@provider.member_type) + elsif %w[direct_member consortium_organization].include?( + @provider.member_type, + ) options[:meta] = { "repositoryCount" => Array.wrap(@provider.client_ids).length, } @@ -169,12 +230,16 @@ def show options[:is_collection] = false options[:params] 
= { current_ability: current_ability } - render json: ProviderSerializer.new(@provider, options).serialized_json, status: :ok + render json: ProviderSerializer.new(@provider, options).serialized_json, + status: :ok end def create # generate random symbol if no symbol is provided - @provider = Provider.new(safe_params.reverse_merge(symbol: generate_random_provider_symbol)) + @provider = + Provider.new( + safe_params.reverse_merge(symbol: generate_random_provider_symbol), + ) authorize! :create, @provider if @provider.save @@ -184,10 +249,12 @@ def create options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: ProviderSerializer.new(@provider, options).serialized_json, status: :ok + render json: ProviderSerializer.new(@provider, options).serialized_json, + status: :ok else Rails.logger.error @provider.errors.inspect - render json: serialize_errors(@provider.errors), status: :unprocessable_entity + render json: serialize_errors(@provider.errors), + status: :unprocessable_entity end end @@ -198,10 +265,12 @@ def update options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: ProviderSerializer.new(@provider, options).serialized_json, status: :ok + render json: ProviderSerializer.new(@provider, options).serialized_json, + status: :ok else Rails.logger.error @provider.errors.inspect - render json: serialize_errors(@provider.errors), status: :unprocessable_entity + render json: serialize_errors(@provider.errors), + status: :unprocessable_entity end end @@ -212,13 +281,19 @@ def destroy message = "Can't delete provider that has active clients." 
status = 400 Rails.logger.warn message - render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status + render json: { + errors: [{ status: status.to_s, title: message }], + }.to_json, + status: status elsif @provider.update(is_active: nil, deleted_at: Time.zone.now) - @provider.send_delete_email(responsible_id: current_user.uid) unless Rails.env.test? + unless Rails.env.test? + @provider.send_delete_email(responsible_id: current_user.uid) + end head :no_content else Rails.logger.error @provider.errors.inspect - render json: serialize_errors(@provider.errors), status: :unprocessable_entity + render json: serialize_errors(@provider.errors), + status: :unprocessable_entity end end @@ -230,9 +305,17 @@ def random def totals page = { size: 0, number: 1 } - state = current_user.present? && current_user.is_admin_or_staff? && params[:state].present? ? params[:state] : "registered,findable" - response = DataciteDoi.query(nil, state: state, page: page, totals_agg: "provider") - registrant = providers_totals(response.response.aggregations.providers_totals.buckets) + state = + if current_user.present? && current_user.is_admin_or_staff? && + params[:state].present? + params[:state] + else + "registered,findable" + end + response = + DataciteDoi.query(nil, state: state, page: page, totals_agg: "provider") + registrant = + providers_totals(response.response.aggregations.providers_totals.buckets) render json: registrant, status: :ok end @@ -265,67 +348,120 @@ def stats end meta = { + # downloads: downloads, providers: providers, clients: clients, dois: dois, - # "resourceTypes" => resource_types, - # citations: citations, - # views: views, - # downloads: downloads, }.compact render json: meta, status: :ok end protected - - def set_include - if params[:include].present? 
- @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[consortium consortium_organizations] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[consortium consortium_organizations] + else + @include = [] + end end - end - def set_provider - @provider = Provider.unscoped.where("allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_ADMIN', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')").where(deleted_at: nil).where(symbol: params[:id]).first - fail ActiveRecord::RecordNotFound if @provider.blank? - end + def set_provider + @provider = + Provider.unscoped.where( + "allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONTRACTUAL_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_ADMIN', 'ROLE_MEMBER', 'ROLE_REGISTRATION_AGENCY')", + ). + where(deleted_at: nil). + where(symbol: params[:id]). + first + fail ActiveRecord::RecordNotFound if @provider.blank? + end private + def safe_params + if params[:data].blank? + fail JSON::ParserError, + "You need to provide a payload following the JSONAPI spec" + end - def safe_params - fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank? 
- - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, - only: [ - :name, "displayName", :symbol, :logo, :description, :website, :joined, "globusUuid", "organizationType", "focusArea", :consortium, "systemEmail", "groupEmail", "isActive", "passwordInput", :country, "billingInformation", { "billingInformation": ["postCode", :state, :city, :address, :department, :organization, :country] }, "rorId", "twitterHandle", "memberType", "nonProfitStatus", "salesforceId", - "technicalContact", { "technicalContact": [:email, "givenName", "familyName"] }, - "secondaryTechnicalContact", { "secondaryTechnicalContact": [:email, "givenName", "familyName"] }, - "secondaryBillingContact", { "secondaryBillingContact": [:email, "givenName", "familyName"] }, - "billingContact", { "billingContact": [:email, "givenName", "familyName"] }, - "serviceContact", { "serviceContact": [:email, "givenName", "familyName"] }, - "secondaryServiceContact", { "secondaryServiceContact": [:email, "givenName", "familyName"] }, - "votingContact", { "votingContact": [:email, "givenName", "familyName"] } - ], - keys: { - "displayName" => :display_name, - "organizationType" => :organization_type, "focusArea" => :focus_area, :country => :country_code, "isActive" => :is_active, "passwordInput" => :password_input, "billingInformation" => :billing_information, "postCode" => :post_code, "rorId" => :ror_id, "twitterHandle" => :twitter_handle, "memberType" => :member_type, - "technicalContact" => :technical_contact, - "secondaryTechnicalContact" => :secondary_technical_contact, - "secondaryBillingContact" => :secondary_billing_contact, - "billingContact" => :billing_contact, - "serviceContact" => :service_contact, - "secondaryServiceContact" => :secondary_service_contact, - "votingContact" => :voting_contact, - "groupEmail" => :group_email, - "systemEmail" => :system_email, - "nonProfitStatus" => :non_profit_status, - "salesforceId" => :salesforce_id, - "globusUuid" => :globus_uuid - }, - ) - end + 
ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [ + :name, + "displayName", + :symbol, + :logo, + :description, + :website, + :joined, + "globusUuid", + "organizationType", + "focusArea", + :consortium, + "systemEmail", + "groupEmail", + "isActive", + "passwordInput", + :country, + "billingInformation", + { + "billingInformation": [ + "postCode", + :state, + :city, + :address, + :department, + :organization, + :country, + ], + }, + "rorId", + "twitterHandle", + "memberType", + "nonProfitStatus", + "salesforceId", + "technicalContact", + { "technicalContact": [:email, "givenName", "familyName"] }, + "secondaryTechnicalContact", + { "secondaryTechnicalContact": [:email, "givenName", "familyName"] }, + "secondaryBillingContact", + { "secondaryBillingContact": [:email, "givenName", "familyName"] }, + "billingContact", + { "billingContact": [:email, "givenName", "familyName"] }, + "serviceContact", + { "serviceContact": [:email, "givenName", "familyName"] }, + "secondaryServiceContact", + { "secondaryServiceContact": [:email, "givenName", "familyName"] }, + "votingContact", + { "votingContact": [:email, "givenName", "familyName"] }, + ], + keys: { + "displayName" => :display_name, + "organizationType" => :organization_type, + "focusArea" => :focus_area, + country: :country_code, + "isActive" => :is_active, + "passwordInput" => :password_input, + "billingInformation" => :billing_information, + "postCode" => :post_code, + "rorId" => :ror_id, + "twitterHandle" => :twitter_handle, + "memberType" => :member_type, + "technicalContact" => :technical_contact, + "secondaryTechnicalContact" => :secondary_technical_contact, + "secondaryBillingContact" => :secondary_billing_contact, + "billingContact" => :billing_contact, + "serviceContact" => :service_contact, + "secondaryServiceContact" => :secondary_service_contact, + "votingContact" => :voting_contact, + "groupEmail" => :group_email, + "systemEmail" => :system_email, + "nonProfitStatus" => 
:non_profit_status, + "salesforceId" => :salesforce_id, + "globusUuid" => :globus_uuid, + }, + ) + end end diff --git a/app/controllers/random_controller.rb b/app/controllers/random_controller.rb index 6ff92d743..c26c4e8f2 100644 --- a/app/controllers/random_controller.rb +++ b/app/controllers/random_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class RandomController < ApplicationController before_action :authenticate_user! load_and_authorize_resource Phrase diff --git a/app/controllers/repositories_controller.rb b/app/controllers/repositories_controller.rb index eceaf2f23..53c238e74 100644 --- a/app/controllers/repositories_controller.rb +++ b/app/controllers/repositories_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class RepositoriesController < ApplicationController include ActionController::MimeResponds include Countable @@ -5,51 +7,78 @@ class RepositoriesController < ApplicationController before_action :set_repository, only: %i[show update destroy] before_action :authenticate_user! 
before_action :set_include - load_and_authorize_resource :client, parent: false, except: %i[index show create totals random stats] - around_action :skip_bullet, only: [:index], if: -> { defined?(Bullet) } + load_and_authorize_resource :client, + parent: false, + except: %i[index show create totals random stats] + around_action :skip_bullet, only: %i[index], if: -> { defined?(Bullet) } def index - sort = case params[:sort] - when "relevance" then { "_score" => { order: "desc" } } - when "name" then { "name.raw" => { order: "asc" } } - when "-name" then { "name.raw" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - else { "name.raw" => { order: "asc" } } - end + sort = + case params[:sort] + when "relevance" + { "_score" => { order: "desc" } } + when "name" + { "name.raw" => { order: "asc" } } + when "-name" + { "name.raw" => { order: "desc" } } + when "created" + { created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + else + { "name.raw" => { order: "asc" } } + end page = page_from_params(params) - if params[:id].present? - response = Client.find_by(id: params[:id]) + response = if params[:id].present? + Client.find_by(id: params[:id]) elsif params[:ids].present? 
- response = Client.find_by_id(params[:ids], page: page, sort: sort) + Client.find_by_id(params[:ids], page: page, sort: sort) else - response = Client.query(params[:query], - year: params[:year], - from_date: params[:from_date], - until_date: params[:until_date], - provider_id: params[:provider_id], - consortium_id: params[:consortium_id], - re3data_id: params[:re3data_id], - opendoar_id: params[:opendoar_id], - software: params[:software], - certificate: params[:certificate], - repository_type: params[:repository_type], - client_type: params[:client_type], - page: page, - sort: sort) + Client.query( + params[:query], + year: params[:year], + from_date: params[:from_date], + until_date: params[:until_date], + provider_id: params[:provider_id], + consortium_id: params[:consortium_id], + re3data_id: params[:re3data_id], + opendoar_id: params[:opendoar_id], + software: params[:software], + certificate: params[:certificate], + repository_type: params[:repository_type], + client_type: params[:client_type], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size] > 0 ? (total.to_f / page[:size]).ceil : 0 - years = total > 0 ? facet_by_year(response.aggregations.years.buckets) : nil - providers = total > 0 ? facet_by_combined_key(response.aggregations.providers.buckets) : nil - software = total > 0 ? facet_by_software(response.aggregations.software.buckets) : nil - certificates = total > 0 ? facet_by_key(response.aggregations.certificates.buckets) : nil - client_types = total > 0 ? facet_by_key(response.aggregations.client_types.buckets) : nil - repository_types = total > 0 ? facet_by_key(response.aggregations.repository_types.buckets) : nil + years = + total > 0 ? 
facet_by_year(response.aggregations.years.buckets) : nil + providers = + if total > 0 + facet_by_combined_key(response.aggregations.providers.buckets) + end + software = + if total > 0 + facet_by_software(response.aggregations.software.buckets) + end + certificates = + if total > 0 + facet_by_key(response.aggregations.certificates.buckets) + end + client_types = + if total > 0 + facet_by_key(response.aggregations.client_types.buckets) + end + repository_types = + if total > 0 + facet_by_key(response.aggregations.repository_types.buckets) + end respond_to do |format| format.json do @@ -68,18 +97,25 @@ def index options[:links] = { self: request.original_url, - next: response.results.blank? ? nil : request.base_url + "/repositories?" + { - query: params[:query], - "provider-id" => params[:provider_id], - software: params[:software], - certificate: params[:certificate], - "client-type" => params[:client_type], - "repository-type" => params[:repository_type], - year: params[:year], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if response.results.blank? + nil + else + request.base_url + "/repositories?" + + { + query: params[:query], + "provider-id" => params[:provider_id], + software: params[:software], + certificate: params[:certificate], + "client-type" => params[:client_type], + "repository-type" => params[:repository_type], + year: params[:year], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true @@ -87,12 +123,21 @@ def index fields = fields_from_params(params) if fields - render json: RepositorySerializer.new(response.results, options.merge(fields: fields)).serialized_json, status: :ok + render json: + RepositorySerializer.new( + response.results, + options.merge(fields: fields), + ). 
+ serialized_json, + status: :ok else - render json: RepositorySerializer.new(response.results, options).serialized_json, status: :ok + render json: + RepositorySerializer.new(response.results, options). + serialized_json, + status: :ok end end - header = %w( + header = %w[ accountName fabricaAccountId parentFabricaAccountId @@ -108,15 +153,24 @@ def index url software system_email - ) - format.csv { render request.format.to_sym => response.records.to_a, header: header } + ] + format.csv do + render request.format.to_sym => response.records.to_a, header: header + end end rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -126,17 +180,19 @@ def show options = {} options[:meta] = { - "doiCount" => doi_count(client_id: params[:id]).reduce(0) do |sum, item| - sum += item["count"] - sum - end, + "doiCount" => + doi_count(client_id: params[:id]).reduce(0) do |sum, item| + sum += item["count"] + sum + end, "prefixCount" => Array.wrap(repository.prefix_ids).length, }.compact options[:include] = @include options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: RepositorySerializer.new(repository, options).serialized_json, status: :ok + render json: RepositorySerializer.new(repository, options).serialized_json, + status: :ok end def create @@ -150,10 +206,12 @@ def create options[:is_collection] = false options[:params] = { current_ability: current_ability } - render json: RepositorySerializer.new(@client, options).serialized_json, status: :created + render json: RepositorySerializer.new(@client, options).serialized_json, + 
status: :created else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end @@ -167,13 +225,15 @@ def update authorize! :transfer, @client @client.transfer(provider_target_id: safe_params[:target_id]) - render json: RepositorySerializer.new(@client, options).serialized_json, status: :ok + render json: RepositorySerializer.new(@client, options).serialized_json, + status: :ok elsif @client.update(safe_params) - - render json: RepositorySerializer.new(@client, options).serialized_json, status: :ok + render json: RepositorySerializer.new(@client, options).serialized_json, + status: :ok else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end @@ -184,13 +244,17 @@ def destroy message = "Can't delete repository that has DOIs." status = 400 Rails.logger.warn message - render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status + render json: { + errors: [{ status: status.to_s, title: message }], + }.to_json, + status: status elsif @client.update(is_active: nil, deleted_at: Time.zone.now) @client.send_delete_email unless Rails.env.test? head :no_content else Rails.logger.error @client.errors.inspect - render json: serialize_errors(@client.errors), status: :unprocessable_entity + render json: serialize_errors(@client.errors), + status: :unprocessable_entity end end @@ -202,49 +266,113 @@ def random def totals page = { size: 0, number: 1 } - state = current_user.present? && current_user.is_admin_or_staff? && params[:state].present? ? params[:state] : "registered,findable" - response = DataciteDoi.query(nil, provider_id: params[:provider_id], state: state, page: page, totals_agg: "client") - registrant = response.results.total.positive? 
? clients_totals(response.aggregations.clients_totals.buckets) : [] + state = + if current_user.present? && current_user.is_admin_or_staff? && + params[:state].present? + params[:state] + else + "registered,findable" + end + response = + DataciteDoi.query( + nil, + provider_id: params[:provider_id], + state: state, + page: page, + totals_agg: "client", + ) + registrant = + if response.results.total.positive? + clients_totals(response.aggregations.clients_totals.buckets) + else + [] + end render json: registrant, status: :ok end def stats meta = { - dois: doi_count(client_id: params[:id]), + dois: + doi_count( + client_id: + # downloads: download_count(client_id: params[:id]), + params[ + :id + ], + ), "resourceTypes" => resource_type_count(client_id: params[:id]), - # citations: citation_count(client_id: params[:id]), - # views: view_count(client_id: params[:id]), - # downloads: download_count(client_id: params[:id]), }.compact render json: meta, status: :ok end protected - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & [:provider] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[provider] + else + @include = [] + end end - end - def set_repository - @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first - fail ActiveRecord::RecordNotFound if @client.blank? - end + def set_repository + @client = Client.where(symbol: params[:id]).where(deleted_at: nil).first + fail ActiveRecord::RecordNotFound if @client.blank? + end private + def safe_params + if params[:data].blank? + fail JSON::ParserError, + "You need to provide a payload following the JSONAPI spec" + end - def safe_params - fail JSON::ParserError, "You need to provide a payload following the JSONAPI spec" if params[:data].blank? 
- - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:symbol, :name, "systemEmail", :domains, :provider, :url, "globusUuid", "repositoryType", { "repositoryType" => [] }, :description, :language, { language: [] }, "alternateName", :software, "targetId", "isActive", "passwordInput", "clientType", :re3data, :opendoar, :issn, { issn: %i[issnl electronic print] }, :certificate, { certificate: [] }, "serviceContact", { "serviceContact": [:email, "givenName", "familyName"] }, "salesforceId"], - keys: { "systemEmail" => :system_email, "salesforceId" => :salesforce_id, "globusUuid" => :globus_uuid, "targetId" => :target_id, "isActive" => :is_active, "passwordInput" => :password_input, "clientType" => :client_type, "alternateName" => :alternate_name, "repositoryType" => :repository_type, "serviceContact" => :service_contact } - ) - end + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [ + :symbol, + :name, + "systemEmail", + :domains, + :provider, + :url, + "globusUuid", + "repositoryType", + { "repositoryType" => [] }, + :description, + :language, + { language: [] }, + "alternateName", + :software, + "targetId", + "isActive", + "passwordInput", + "clientType", + :re3data, + :opendoar, + :issn, + { issn: %i[issnl electronic print] }, + :certificate, + { certificate: [] }, + "serviceContact", + { "serviceContact": [:email, "givenName", "familyName"] }, + "salesforceId", + ], + keys: { + "systemEmail" => :system_email, + "salesforceId" => :salesforce_id, + "globusUuid" => :globus_uuid, + "targetId" => :target_id, + "isActive" => :is_active, + "passwordInput" => :password_input, + "clientType" => :client_type, + "alternateName" => :alternate_name, + "repositoryType" => :repository_type, + "serviceContact" => :service_contact, + }, + ) + end end diff --git a/app/controllers/repository_prefixes_controller.rb b/app/controllers/repository_prefixes_controller.rb index 157150e0a..df8ffbd23 100644 --- 
a/app/controllers/repository_prefixes_controller.rb +++ b/app/controllers/repository_prefixes_controller.rb @@ -1,40 +1,61 @@ +# frozen_string_literal: true + require "uri" class RepositoryPrefixesController < ApplicationController before_action :set_client_prefix, only: %i[show update destroy] before_action :authenticate_user! before_action :set_include - around_action :skip_bullet, only: [:index], if: -> { defined?(Bullet) } + around_action :skip_bullet, only: %i[index], if: -> { defined?(Bullet) } def index - sort = case params[:sort] - when "name" then { "prefix_id" => { order: "asc" } } - when "-name" then { "prefix_id" => { order: "desc" } } - when "created" then { created_at: { order: "asc" } } - when "-created" then { created_at: { order: "desc" } } - else { created_at: { order: "desc" } } - end + sort = + case params[:sort] + when "name" + { "prefix_id" => { order: "asc" } } + when "-name" + { "prefix_id" => { order: "desc" } } + when "created" + { created_at: { order: "asc" } } + when "-created" + { created_at: { order: "desc" } } + else + { created_at: { order: "desc" } } + end page = page_from_params(params) - if params[:id].present? - response = ClientPrefix.find_by(id: params[:id]) + response = if params[:id].present? + ClientPrefix.find_by(id: params[:id]) else - response = ClientPrefix.query(params[:query], - client_id: params[:repository_id], - prefix_id: params[:prefix_id], - prefix: params[:prefix], - year: params[:year], - page: page, - sort: sort) + ClientPrefix.query( + params[:query], + client_id: params[:repository_id], + prefix_id: params[:prefix_id], + prefix: params[:prefix], + year: params[:year], + page: page, + sort: sort, + ) end begin total = response.results.total total_pages = page[:size].positive? ? (total.to_f / page[:size]).ceil : 0 - years = total.positive? ? facet_by_year(response.response.aggregations.years.buckets) : nil - providers = total.positive? ? 
facet_by_combined_key(response.response.aggregations.providers.buckets) : nil - repositories = total.positive? ? facet_by_combined_key(response.response.aggregations.clients.buckets) : nil + years = + if total.positive? + facet_by_year(response.response.aggregations.years.buckets) + end + providers = + if total.positive? + facet_by_combined_key( + response.response.aggregations.providers.buckets, + ) + end + repositories = + if total.positive? + facet_by_combined_key(response.response.aggregations.clients.buckets) + end repository_prefixes = response.results @@ -50,26 +71,43 @@ def index options[:links] = { self: request.original_url, - next: repository_prefixes.blank? ? nil : request.base_url + "/repository-prefixes?" + { - query: params[:query], - prefix_id: params[:prefix_id], - repository_id: params[:repository_id], - year: params[:year], - "page[number]" => page[:number] + 1, - "page[size]" => page[:size], - sort: params[:sort], - }.compact.to_query, + next: + if repository_prefixes.blank? + nil + else + request.base_url + "/repository-prefixes?" + + { + query: params[:query], + prefix_id: params[:prefix_id], + repository_id: params[:repository_id], + year: params[:year], + "page[number]" => page[:number] + 1, + "page[size]" => page[:size], + sort: params[:sort], + }.compact. + to_query + end, }.compact options[:include] = @include options[:is_collection] = true - render json: RepositoryPrefixSerializer.new(repository_prefixes, options).serialized_json, status: :ok + render json: + RepositoryPrefixSerializer.new(repository_prefixes, options). 
+ serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e Raven.capture_exception(e) - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -78,7 +116,10 @@ def show options[:include] = @include options[:is_collection] = false - render json: RepositoryPrefixSerializer.new(@client_prefix, options).serialized_json, status: :ok + render json: + RepositoryPrefixSerializer.new(@client_prefix, options). + serialized_json, + status: :ok end def create @@ -86,31 +127,49 @@ def create authorize! :create, @client_prefix if @client_prefix.save - if @client_prefix.__elasticsearch__.index_document.dig("result") != "created" - logger.error "Error adding Repository Prefix #{@client_prefix.uid} to Elasticsearch index." + if @client_prefix.__elasticsearch__.index_document.dig("result") != + "created" + logger.error "Error adding Repository Prefix #{ + @client_prefix.uid + } to Elasticsearch index." end - if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@client_prefix.prefix.uid}." + if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @client_prefix.prefix.uid + }." end - if @client_prefix.provider_prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Provider Prefix #{@client_prefix.provider_prefix.uid}." 
+ if @client_prefix.provider_prefix.__elasticsearch__.index_document.dig( + "result", + ) != + "updated" + logger.error "Error updating Elasticsearch index for Provider Prefix #{ + @client_prefix.provider_prefix.uid + }." end options = {} options[:include] = @include options[:is_collection] = false - render json: RepositoryPrefixSerializer.new(@client_prefix, options).serialized_json, status: :created + render json: + RepositoryPrefixSerializer.new(@client_prefix, options). + serialized_json, + status: :created else Rails.logger.error @client_prefix.errors.inspect - render json: serialize_errors(@client_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@client_prefix.errors), + status: :unprocessable_entity end end def update authorize! :update, @client_prefix response.headers["Allow"] = "HEAD, GET, POST, DELETE, OPTIONS" - render json: { errors: [{ status: "405", title: "Method not allowed" }] }.to_json, status: :method_not_allowed + render json: { + errors: [{ status: "405", title: "Method not allowed" }], + }.to_json, + status: :method_not_allowed end def destroy @@ -118,46 +177,55 @@ def destroy message = "Client prefix #{@client_prefix.uid} deleted." if @client_prefix.destroy - if @client_prefix.__elasticsearch__.delete_document.dig("result") != "deleted" - logger.error "Error deleting Repository Prefix #{@client_prefix.uid} from Elasticsearch index." + if @client_prefix.__elasticsearch__.delete_document.dig("result") != + "deleted" + logger.error "Error deleting Repository Prefix #{ + @client_prefix.uid + } from Elasticsearch index." end - if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != "updated" - logger.error "Error updating Elasticsearch index for Prefix #{@client_prefix.prefix.uid}." + if @client_prefix.prefix.__elasticsearch__.index_document.dig("result") != + "updated" + logger.error "Error updating Elasticsearch index for Prefix #{ + @client_prefix.prefix.uid + }." 
end if @client_prefix.provider_prefix.__elasticsearch__.index_document - logger.error "Error updating Elasticsearch index for Provider Prefix #{@client_prefix.provider_prefix.uid}." + logger.error "Error updating Elasticsearch index for Provider Prefix #{ + @client_prefix.provider_prefix.uid + }." end logger.warn message head :no_content else Rails.logger.error @client_prefix.errors.inspect - render json: serialize_errors(@client_prefix.errors), status: :unprocessable_entity + render json: serialize_errors(@client_prefix.errors), + status: :unprocessable_entity end end protected - - def set_include - if params[:include].present? - @include = params[:include].split(",").map { |i| i.downcase.underscore.to_sym } - @include = @include & %i[repository prefix provider_prefix provider] - else - @include = [] + def set_include + if params[:include].present? + @include = + params[:include].split(",").map { |i| i.downcase.underscore.to_sym } + @include = @include & %i[repository prefix provider_prefix provider] + else + @include = [] + end end - end private + def set_client_prefix + @client_prefix = ClientPrefix.where(uid: params[:id]).first + fail ActiveRecord::RecordNotFound if @client_prefix.blank? + end - def set_client_prefix - @client_prefix = ClientPrefix.where(uid: params[:id]).first - fail ActiveRecord::RecordNotFound if @client_prefix.blank? 
- end - - def safe_params - ActiveModelSerializers::Deserialization.jsonapi_parse!( - params, only: [:id, :repository, :prefix, "provider-prefix"], - keys: { repository: :client, "provider-prefix" => :provider_prefix } - ) - end + def safe_params + ActiveModelSerializers::Deserialization.jsonapi_parse!( + params, + only: [:id, :repository, :prefix, "provider-prefix"], + keys: { repository: :client, "provider-prefix" => :provider_prefix }, + ) + end end diff --git a/app/controllers/resource_types_controller.rb b/app/controllers/resource_types_controller.rb index 49989e7af..2bb9f3079 100644 --- a/app/controllers/resource_types_controller.rb +++ b/app/controllers/resource_types_controller.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ResourceTypesController < ApplicationController def index @resource_types = ResourceType.where(params) @@ -10,7 +12,10 @@ def index }.compact options[:is_collection] = true - render json: ResourceTypeSerializer.new(@resource_types[:data], options).serialized_json, status: :ok + render json: + ResourceTypeSerializer.new(@resource_types[:data], options). + serialized_json, + status: :ok end def show @@ -20,6 +25,9 @@ def show options = {} options[:is_collection] = false - render json: ResourceTypeSerializer.new(@resource_type[:data], options).serialized_json, status: :ok + render json: + ResourceTypeSerializer.new(@resource_type[:data], options). + serialized_json, + status: :ok end end diff --git a/app/controllers/sessions_controller.rb b/app/controllers/sessions_controller.rb index bfaef306d..974e24ce4 100644 --- a/app/controllers/sessions_controller.rb +++ b/app/controllers/sessions_controller.rb @@ -1,27 +1,45 @@ +# frozen_string_literal: true + class SessionsController < ApplicationController def create_token - error_response("Wrong grant type.") && return if safe_params[:grant_type] != "password" - error_response("Missing account ID or password.") && return if - safe_params[:username].blank? 
|| safe_params[:username] == "undefined" || - safe_params[:password].blank? || safe_params[:password] == "undefined" + if safe_params[:grant_type] != "password" + error_response("Wrong grant type.") && return + end + if safe_params[:username].blank? || safe_params[:username] == "undefined" || + safe_params[:password].blank? || + safe_params[:password] == "undefined" + error_response("Missing account ID or password.") && return + end - credentials = User.encode_auth_param(username: safe_params[:username], password: safe_params[:password]) + credentials = + User.encode_auth_param( + username: safe_params[:username], password: safe_params[:password], + ) user = User.new(credentials, type: "basic") error_response(user.errors) && return if user.errors.present? - error_response("Wrong account ID or password.") && return if user.role_id == "anonymous" + if user.role_id == "anonymous" + error_response("Wrong account ID or password.") && return + end - render json: { "access_token" => user.jwt, "expires_in" => 3600 * 24 * 30 }.to_json, status: :ok + render json: { + "access_token" => user.jwt, "expires_in" => 3_600 * 24 * 30 + }.to_json, + status: :ok end def create_oidc_token - error_response("Missing token.") && return if - safe_params[:token].blank? || safe_params[:token] == "undefined" + if safe_params[:token].blank? || safe_params[:token] == "undefined" + error_response("Missing token.") && return + end user = User.new(safe_params[:token], type: "oidc") error_response(user.errors) && return if user.errors.present? 
- render json: { "access_token" => user.jwt, "expires_in" => 3600 * 24 * 30 }.to_json, status: :ok + render json: { + "access_token" => user.jwt, "expires_in" => 3_600 * 24 * 30 + }.to_json, + status: :ok end def reset @@ -43,14 +61,26 @@ def reset end private + def error_response(message) + status = 400 + logger.error message + render json: { errors: [{ status: status.to_s, title: message }] }.to_json, + status: status + end - def error_response(message) - status = 400 - logger.error message - render json: { errors: [{ status: status.to_s, title: message }] }.to_json, status: status - end - - def safe_params - params.permit(:grant_type, :username, :password, :token, :client_id, :client_secret, :refresh_token, :session, :format, :controller, :action) - end + def safe_params + params.permit( + :grant_type, + :username, + :password, + :token, + :client_id, + :client_secret, + :refresh_token, + :session, + :format, + :controller, + :action, + ) + end end diff --git a/app/controllers/works_controller.rb b/app/controllers/works_controller.rb index 9620d994d..33b45a1a5 100644 --- a/app/controllers/works_controller.rb +++ b/app/controllers/works_controller.rb @@ -1,64 +1,108 @@ +# frozen_string_literal: true + class WorksController < ApplicationController - before_action :set_doi, only: [:show] + before_action :set_doi, only: %i[show] before_action :set_include, only: %i[index show] def index - sort = case params[:sort] - when "name" then { "doi" => { order: "asc" } } - when "-name" then { "doi" => { order: "desc" } } - when "created" then { created: { order: "asc" } } - when "-created" then { created: { order: "desc" } } - when "updated" then { updated: { order: "asc" } } - when "-updated" then { updated: { order: "desc" } } - when "relevance" then { "_score": { "order": "desc" } } - else { updated: { order: "desc" } } - end + sort = + case params[:sort] + when "name" + { "doi" => { order: "asc" } } + when "-name" + { "doi" => { order: "desc" } } + when "created" + { 
created: { order: "asc" } } + when "-created" + { created: { order: "desc" } } + when "updated" + { updated: { order: "asc" } } + when "-updated" + { updated: { order: "desc" } } + when "relevance" + { "_score": { "order": "desc" } } + else + { updated: { order: "desc" } } + end page = page_from_params(params) - sample_group_field = case params[:sample_group] - when "client" then "client_id" - when "data-center" then "client_id" - when "provider" then "provider_id" - when "resource-type" then "types.resourceTypeGeneral" - end + sample_group_field = + case params[:sample_group] + when "client" + "client_id" + when "data-center" + "client_id" + when "provider" + "provider_id" + when "resource-type" + "types.resourceTypeGeneral" + end - if params[:id].present? - response = DataciteDoi.find_by(id: params[:id]) + response = if params[:id].present? + DataciteDoi.find_by(id: params[:id]) elsif params[:ids].present? - response = DataciteDoi.find_by_ids(params[:ids], page: page, sort: sort) + DataciteDoi.find_by_ids(params[:ids], page: page, sort: sort) else - response = DataciteDoi.query(params[:query], - state: "findable", - exclude_registration_agencies: true, - created: params[:created], - registered: params[:registered], - provider_id: params[:member_id], - client_id: params[:data_center_id], - affiliation_id: params[:affiliation_id], - prefix: params[:prefix], - user_id: params[:person_id], - resource_type_id: params[:resource_type_id], - has_citations: params[:has_citations], - has_views: params[:has_views], - has_downloads: params[:has_downloads], - schema_version: params[:schema_version], - sample_group: sample_group_field, - sample_size: params[:sample], - page: page, - sort: sort, - random: params[:sample].present? ? 
true : false) + DataciteDoi.query( + params[:query], + state: "findable", + exclude_registration_agencies: true, + created: params[:created], + registered: params[:registered], + provider_id: params[:member_id], + client_id: params[:data_center_id], + affiliation_id: params[:affiliation_id], + prefix: params[:prefix], + user_id: params[:person_id], + resource_type_id: params[:resource_type_id], + has_citations: params[:has_citations], + has_views: params[:has_views], + has_downloads: params[:has_downloads], + schema_version: params[:schema_version], + sample_group: sample_group_field, + sample_size: params[:sample], + page: page, + sort: sort, + random: params[:sample].present? ? true : false, + ) end begin total = response.results.total - total_pages = page[:size].positive? ? ([total.to_f, 10000].min / page[:size]).ceil : 0 + total_pages = + if page[:size].positive? + ([total.to_f, 10_000].min / page[:size]).ceil + else + 0 + end - resource_types = total > 0 ? facet_by_combined_key(response.response.aggregations.resource_types.buckets) : nil - registered = total > 0 ? facet_by_year(response.response.aggregations.registered.buckets) : nil - providers = total > 0 ? facet_by_combined_key(response.response.aggregations.providers.buckets) : nil - clients = total > 0 ? facet_by_combined_key(response.response.aggregations.clients.buckets) : nil - affiliations = total > 0 ? 
facet_by_combined_key(response.response.aggregations.affiliations.buckets) : nil + resource_types = + if total > 0 + facet_by_combined_key( + response.response.aggregations.resource_types.buckets, + ) + end + registered = + if total > 0 + facet_by_year(response.response.aggregations.registered.buckets) + end + providers = + if total > 0 + facet_by_combined_key( + response.response.aggregations.providers.buckets, + ) + end + clients = + if total > 0 + facet_by_combined_key(response.response.aggregations.clients.buckets) + end + affiliations = + if total > 0 + facet_by_combined_key( + response.response.aggregations.affiliations.buckets, + ) + end @dois = response.results @@ -76,9 +120,7 @@ def index options[:include] = @include options[:is_collection] = true options[:links] = nil - options[:params] = { - current_ability: current_ability, - } + options[:params] = { current_ability: current_ability } # If we're using sample groups we need to unpack the results from the aggregation bucket hits. if sample_group_field.present? 
@@ -93,11 +135,19 @@ def index # Results to return are either our sample group dois or the regular hit results @dois = sample_dois || response.results - render json: WorkSerializer.new(@dois, options).serialized_json, status: :ok + render json: WorkSerializer.new(@dois, options).serialized_json, + status: :ok rescue Elasticsearch::Transport::Transport::Errors::BadRequest => e - message = JSON.parse(e.message[6..-1]).to_h.dig("error", "root_cause", 0, "reason") - - render json: { "errors" => { "title" => message } }.to_json, status: :bad_request + message = + JSON.parse(e.message[6..-1]).to_h.dig( + "error", + "root_cause", + 0, + "reason", + ) + + render json: { "errors" => { "title" => message } }.to_json, + status: :bad_request end end @@ -105,36 +155,34 @@ def show options = {} options[:include] = @include options[:is_collection] = false - options[:params] = { - current_ability: current_ability, - detail: true, - } + options[:params] = { current_ability: current_ability, detail: true } render json: WorkSerializer.new(@doi, options).serialized_json, status: :ok end protected + def set_doi + @doi = + DataciteDoi.where(doi: params[:id]).where(aasm_state: "findable").first - def set_doi - @doi = DataciteDoi.where(doi: params[:id]).where(aasm_state: "findable").first - - fail ActiveRecord::RecordNotFound if @doi.blank? - end + fail ActiveRecord::RecordNotFound if @doi.blank? + end - def set_include - if params[:include].present? - include_keys = { - "data_center" => :client, - "member" => :provider, - "resource_type" => :resource_type, - } - @include = params[:include].split(",").reduce([]) do |sum, i| - k = include_keys[i.downcase.underscore] - sum << k if k.present? - sum + def set_include + if params[:include].present? + include_keys = { + "data_center" => :client, + "member" => :provider, + "resource_type" => :resource_type, + } + @include = + params[:include].split(",").reduce([]) do |sum, i| + k = include_keys[i.downcase.underscore] + sum << k if k.present? 
+ sum + end + else + @include = [] end - else - @include = [] end - end end diff --git a/app/graphql/connections/elasticsearch_model_response_connection.rb b/app/graphql/connections/elasticsearch_model_response_connection.rb index 4fd40ad50..0730e007f 100644 --- a/app/graphql/connections/elasticsearch_model_response_connection.rb +++ b/app/graphql/connections/elasticsearch_model_response_connection.rb @@ -1,168 +1,172 @@ - # frozen_string_literal: true - - # A Connection wraps a list of items and provides cursor-based pagination over it. - # - # Connections were introduced by Facebook's `Relay` front-end framework, but - # proved to be generally useful for GraphQL APIs. When in doubt, use connections - # to serve lists (like Arrays, ActiveRecord::Relations) via GraphQL. - # - # Unlike the previous connection implementation, these default to bidirectional pagination. - # - # Pagination arguments and context may be provided at initialization or assigned later (see {Schema::Field::ConnectionExtension}). - class ElasticsearchModelResponseConnection - class PaginationImplementationMissingError < GraphQL::Error - end +# frozen_string_literal: true + +# A Connection wraps a list of items and provides cursor-based pagination over it. +# +# Connections were introduced by Facebook's `Relay` front-end framework, but +# proved to be generally useful for GraphQL APIs. When in doubt, use connections +# to serve lists (like Arrays, ActiveRecord::Relations) via GraphQL. +# +# Unlike the previous connection implementation, these default to bidirectional pagination. +# +# Pagination arguments and context may be provided at initialization or assigned later (see {Schema::Field::ConnectionExtension}). +class ElasticsearchModelResponseConnection + class PaginationImplementationMissingError < GraphQL::Error; end + + # @return [Class] The class to use for wrapping items as `edges { ... }`. 
Defaults to `Connection::Edge` + def self.edge_class + self::Edge + end - # @return [Class] The class to use for wrapping items as `edges { ... }`. Defaults to `Connection::Edge` - def self.edge_class - self::Edge - end + # @return [Object] A list object, from the application. This is the unpaginated value passed into the connection. + attr_reader :items - # @return [Object] A list object, from the application. This is the unpaginated value passed into the connection. - attr_reader :items + # @return [Object] A list object, from the application. This is the paginated value passed into the connection. + attr_reader :nodes - # @return [Object] A list object, from the application. This is the paginated value passed into the connection. - attr_reader :nodes + # @return [Object] A list object, from the application. This is the aggregations returned from Elasticsearch. + attr_reader :aggregations - # @return [Object] A list object, from the application. This is the aggregations returned from Elasticsearch. - attr_reader :aggregations + # @return [GraphQL::Query::Context] + attr_accessor :context - # @return [GraphQL::Query::Context] - attr_accessor :context + # @return [Int] An integer, from the application. This is the number of results. + attr_reader :total_count - # @return [Int] An integer, from the application. This is the number of results. - attr_reader :total_count + # Raw access to client-provided values. (`max_page_size` not applied to first or last.) + attr_accessor :after_value, :first_value - # Raw access to client-provided values. (`max_page_size` not applied to first or last.) - attr_accessor :after_value, :first_value + # @return [String, nil] the client-provided cursor. `""` is treated as `nil`. + def before + raise PaginationImplementationMissingError, "before is not implemented" + end - # @return [String, nil] the client-provided cursor. `""` is treated as `nil`. 
- def before - raise PaginationImplementationMissingError, "before is not implemented" + # @return [String, nil] the client-provided cursor. `""` is treated as `nil`. + def after + if defined?(@after) + @after + else + @after = @after_value == "" ? nil : @after_value end + end - # @return [String, nil] the client-provided cursor. `""` is treated as `nil`. - def after - if defined?(@after) - @after - else - @after = @after_value == "" ? nil : @after_value - end - end + # @param items [Object] some unpaginated collection item, like an `Array` or `ActiveRecord::Relation` + # @param context [Query::Context] + # @param first [Integer, nil] The limit parameter from the client, if it provided one + # @param after [String, nil] A cursor for pagination, if the client provided one + # @param max_page_size [Integer, nil] A configured value to cap the result size. Applied as `first` if neither first or last are given. + def initialize( + items, + context: nil, + first: nil, + after: nil, + max_page_size: :nil, + last: nil, + before: nil + ) + @items = items.results + @context = context + @model = items.klass.name + @nodes = items.results.to_a + + @first_value = first + @after_value = decode(after) if after.present? + + @total_count = items.results.total + + # Elasticsearch aggregations + @aggregations = items.aggregations + + # This is only true if the object was _initialized_ with an override + # or if one is assigned later. + @has_max_page_size_override = max_page_size != :not_given + @max_page_size = max_page_size == :not_given ? nil : max_page_size + end - # @param items [Object] some unpaginated collection item, like an `Array` or `ActiveRecord::Relation` - # @param context [Query::Context] - # @param first [Integer, nil] The limit parameter from the client, if it provided one - # @param after [String, nil] A cursor for pagination, if the client provided one - # @param max_page_size [Integer, nil] A configured value to cap the result size. 
Applied as `first` if neither first or last are given. - def initialize(items, context: nil, first: nil, after: nil, max_page_size: :nil, last: nil, before: nil) - @items = items.results - @context = context - @model = items.klass.name - @nodes = items.results.to_a - - @first_value = first - @after_value = decode(after) if after.present? - - @total_count = items.results.total - - # Elasticsearch aggregations - @aggregations = items.aggregations - - # This is only true if the object was _initialized_ with an override - # or if one is assigned later. - @has_max_page_size_override = max_page_size != :not_given - @max_page_size = if max_page_size == :not_given - nil - else - max_page_size - end - end + def max_page_size=(new_value) + @has_max_page_size_override = true + @max_page_size = new_value + end - def max_page_size=(new_value) - @has_max_page_size_override = true - @max_page_size = new_value + def max_page_size + if @has_max_page_size_override + @max_page_size + else + context.schema.default_max_page_size end + end - def max_page_size - if @has_max_page_size_override - @max_page_size - else - context.schema.default_max_page_size - end - end + def has_max_page_size_override? + @has_max_page_size_override + end - def has_max_page_size_override? - @has_max_page_size_override - end + attr_writer :first, :last - attr_writer :first - # @return [Integer, nil] - # A clamped `first` value. - # (The underlying instance variable doesn't have limits on it.) - # If neither `first` nor `last` is given, but `max_page_size` is present, max_page_size is used for first. - def first - @first ||= begin + # @return [Integer, nil] + # A clamped `first` value. + # (The underlying instance variable doesn't have limits on it.) + # If neither `first` nor `last` is given, but `max_page_size` is present, max_page_size is used for first. + def first + @first ||= + begin capped = limit_pagination_argument(@first_value, max_page_size) - if capped.nil? 
- capped = max_page_size - end + capped = max_page_size if capped.nil? capped end - end - - attr_writer :last - # @return [Integer, nil] A clamped `last` value. (The underlying instance variable doesn't have limits on it) - def last - raise PaginationImplementationMissingError, "last is not implemented" - end + end - # @return [Array] {nodes}, but wrapped with Edge instances - def edges - @edges ||= nodes.map { |n| self.class.edge_class.new(n, self) } - end + # @return [Integer, nil] A clamped `last` value. (The underlying instance variable doesn't have limits on it) + def last + raise PaginationImplementationMissingError, "last is not implemented" + end - # A dynamic alias for compatibility with {Relay::BaseConnection}. - # @deprecated use {#nodes} instead - def edge_nodes - nodes - end + # @return [Array] {nodes}, but wrapped with Edge instances + def edges + @edges ||= nodes.map { |n| self.class.edge_class.new(n, self) } + end - # The connection object itself implements `PageInfo` fields - def page_info - self - end + # A dynamic alias for compatibility with {Relay::BaseConnection}. 
+ # @deprecated use {#nodes} instead + def edge_nodes + nodes + end - # @return [Boolean] True if there are more items after this page - def has_next_page - nodes.length < total_count # && !(nodes.length < first.to_i) - end + # The connection object itself implements `PageInfo` fields + def page_info + self + end - # @return [Boolean] True if there were items before these items - def has_previous_page - raise PaginationImplementationMissingError, "Implement #{self.class}#has_previous_page to return the previous-page check" - end + # @return [Boolean] True if there are more items after this page + def has_next_page + nodes.length < total_count # && !(nodes.length < first.to_i) + end - # @return [String] The cursor of the first item in {nodes} - def start_cursor - nodes.first && cursor_for(nodes.first) - end + # @return [Boolean] True if there were items before these items + def has_previous_page + raise PaginationImplementationMissingError, + "Implement #{ + self.class + }#has_previous_page to return the previous-page check" + end - # @return [String] The cursor of the last item in {nodes} - def end_cursor - nodes.last && cursor_for(nodes.last) - end + # @return [String] The cursor of the first item in {nodes} + def start_cursor + nodes.first && cursor_for(nodes.first) + end - # Return a cursor for this item. Depends on default sorting of model. - # Taken from Elasticsearch for consistency - # @param item [Object] one of the passed in {items}, taken from {nodes} - # @return [String] - def cursor_for(item) - encode(item[:sort].join(",")) - end + # @return [String] The cursor of the last item in {nodes} + def end_cursor + nodes.last && cursor_for(nodes.last) + end - private + # Return a cursor for this item. Depends on default sorting of model. 
+ # Taken from Elasticsearch for consistency + # @param item [Object] one of the passed in {items}, taken from {nodes} + # @return [String] + def cursor_for(item) + encode(item[:sort].join(",")) + end + private # @param argument [nil, Integer] `first` or `last`, as provided by the client # @param max_page_size [nil, Integer] # @return [nil, Integer] `nil` if the input was `nil`, otherwise a value between `0` and `max_page_size` @@ -201,4 +205,4 @@ def cursor @connection.cursor_for(@item) end end - end +end diff --git a/app/graphql/connections/hash_connection.rb b/app/graphql/connections/hash_connection.rb index 71fec5b0b..746d88eb8 100644 --- a/app/graphql/connections/hash_connection.rb +++ b/app/graphql/connections/hash_connection.rb @@ -1,8 +1,7 @@ # frozen_string_literal: true class HashConnection - class PaginationImplementationMissingError < GraphQL::Error - end + class PaginationImplementationMissingError < GraphQL::Error; end # @return [Class] The class to use for wrapping items as `edges { ... }`. Defaults to `Connection::Edge` def self.edge_class @@ -48,7 +47,15 @@ def after # @param last [Integer, nil] Limit parameter from the client, if provided # @param before [String, nil] A cursor for pagination, if the client provided one. # @param max_page_size [Integer, nil] A configured value to cap the result size. Applied as `first` if neither first or last are given. - def initialize(items, context: nil, first: nil, after: nil, max_page_size: :not_given, last: nil, before: nil) + def initialize( + items, + context: nil, + first: nil, + after: nil, + max_page_size: :not_given, + last: nil, + before: nil + ) @items = items[:data] @context = context @nodes = items[:data] @@ -61,11 +68,7 @@ def initialize(items, context: nil, first: nil, after: nil, max_page_size: :not_ # This is only true if the object was _initialized_ with an override # or if one is assigned later. 
@has_max_page_size_override = max_page_size != :not_given - @max_page_size = if max_page_size == :not_given - nil - else - max_page_size - end + @max_page_size = max_page_size == :not_given ? nil : max_page_size end def max_page_size=(new_value) @@ -85,22 +88,21 @@ def has_max_page_size_override? @has_max_page_size_override end - attr_writer :first + attr_writer :first, :last + # @return [Integer, nil] # A clamped `first` value. # (The underlying instance variable doesn't have limits on it.) # If neither `first` nor `last` is given, but `max_page_size` is present, max_page_size is used for first. def first - @first ||= begin - capped = limit_pagination_argument(@first_value, max_page_size) - if capped.nil? - capped = max_page_size + @first ||= + begin + capped = limit_pagination_argument(@first_value, max_page_size) + capped = max_page_size if capped.nil? + capped end - capped - end end - attr_writer :last # @return [Integer, nil] A clamped `last` value. (The underlying instance variable doesn't have limits on it) def last raise PaginationImplementationMissingError, "last is not implemented" @@ -124,7 +126,10 @@ def has_next_page # @return [Boolean] True if there were items before these items def has_previous_page - raise PaginationImplementationMissingError, "Implement #{self.class}#has_previous_page to return the previous-page check" + raise PaginationImplementationMissingError, + "Implement #{ + self.class + }#has_previous_page to return the previous-page check" end # @return [String] The cursor of the first item in {nodes} @@ -141,47 +146,49 @@ def end_cursor # @param item [Object] one of the passed in {items}, taken from {nodes} # @return [String] def cursor_for(item) - raise PaginationImplementationMissingError, "Implement #{self.class}#cursor_for(item) to return the cursor for #{item.inspect}" + raise PaginationImplementationMissingError, + "Implement #{self.class}#cursor_for(item) to return the cursor for #{ + item.inspect + }" end private - - # @param 
argument [nil, Integer] `first` or `last`, as provided by the client - # @param max_page_size [nil, Integer] - # @return [nil, Integer] `nil` if the input was `nil`, otherwise a value between `0` and `max_page_size` - def limit_pagination_argument(argument, max_page_size) - if argument - if argument < 0 - argument = 0 - elsif max_page_size && argument > max_page_size - argument = max_page_size + # @param argument [nil, Integer] `first` or `last`, as provided by the client + # @param max_page_size [nil, Integer] + # @return [nil, Integer] `nil` if the input was `nil`, otherwise a value between `0` and `max_page_size` + def limit_pagination_argument(argument, max_page_size) + if argument + if argument < 0 + argument = 0 + elsif max_page_size && argument > max_page_size + argument = max_page_size + end end + argument end - argument - end - - def decode(cursor) - context.schema.cursor_encoder.decode(cursor, nonce: true) - end - def encode(cursor) - context.schema.cursor_encoder.encode(cursor, nonce: true) - end - - # A wrapper around paginated items. It includes a {cursor} for pagination - # and could be extended with custom relationship-level data. - class Edge - def initialize(item, connection) - @connection = connection - @item = item + def decode(cursor) + context.schema.cursor_encoder.decode(cursor, nonce: true) end - def node - @item + def encode(cursor) + context.schema.cursor_encoder.encode(cursor, nonce: true) end - def cursor - @connection.cursor_for(@item) + # A wrapper around paginated items. It includes a {cursor} for pagination + # and could be extended with custom relationship-level data. 
+ class Edge + def initialize(item, connection) + @connection = connection + @item = item + end + + def node + @item + end + + def cursor + @connection.cursor_for(@item) + end end - end end diff --git a/app/graphql/elasticsearch_loader.rb b/app/graphql/elasticsearch_loader.rb index a26a79786..cc6c7bfd0 100644 --- a/app/graphql/elasticsearch_loader.rb +++ b/app/graphql/elasticsearch_loader.rb @@ -6,7 +6,9 @@ def initialize(model) end def perform(ids) - @model.query(nil, ids: ids).results.each { |record| fulfill(record.uid, record) } + @model.query(nil, ids: ids).results.each do |record| + fulfill(record.uid, record) + end ids.each { |id| fulfill(id, nil) unless fulfilled?(id) } end end diff --git a/app/graphql/lupo_schema.rb b/app/graphql/lupo_schema.rb index ea7865157..90b3e6be8 100644 --- a/app/graphql/lupo_schema.rb +++ b/app/graphql/lupo_schema.rb @@ -5,7 +5,10 @@ class LupoSchema < GraphQL::Schema use GraphQL::Pagination::Connections # custom connection wrapper for Elasticsearch - connections.add(Elasticsearch::Model::Response::Response, ElasticsearchModelResponseConnection) + connections.add( + Elasticsearch::Model::Response::Response, + ElasticsearchModelResponseConnection, + ) # custom connection wrapper for external REST APIs connections.add(Hash, HashConnection) @@ -15,7 +18,7 @@ class LupoSchema < GraphQL::Schema use GraphQL::Batch use GraphQL::Cache - default_max_page_size 1000 + default_max_page_size 1_000 max_depth 10 mutation(MutationType) @@ -28,7 +31,9 @@ class LupoSchema < GraphQL::Schema end rescue_from ActiveRecord::RecordInvalid do |exception| - GraphQL::ExecutionError.new(exception.record.errors.full_messages.join("\n")) + GraphQL::ExecutionError.new( + exception.record.errors.full_messages.join("\n"), + ) end rescue_from CSL::ParseError do |exception| @@ -39,7 +44,12 @@ class LupoSchema < GraphQL::Schema rescue_from StandardError do |exception| Raven.capture_exception(exception) - message = Rails.env.production? ? 
"We are sorry, but an error has occured. This problem has been logged and support has been notified. Please try again later. If the error persists please contact support." : exception.message
+ message =
+ if Rails.env.production?
+ "We are sorry, but an error has occurred. This problem has been logged and support has been notified. Please try again later. If the error persists please contact support."
+ else
+ exception.message
+ end
GraphQL::ExecutionError.new(message)
end
end
diff --git a/app/graphql/mutations/base_mutation.rb b/app/graphql/mutations/base_mutation.rb
index 8d6ed78f6..84751020d 100644
--- a/app/graphql/mutations/base_mutation.rb
+++ b/app/graphql/mutations/base_mutation.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
class BaseMutation < GraphQL::Schema::Mutation
null false
end
diff --git a/app/graphql/mutations/create_claim.rb b/app/graphql/mutations/create_claim.rb
index 4ad65ef4c..94123ff00 100644
--- a/app/graphql/mutations/create_claim.rb
+++ b/app/graphql/mutations/create_claim.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
class CreateClaim < BaseMutation
argument :doi, ID, required: true
argument :id, ID, required: false
@@ -7,44 +9,63 @@ class CreateClaim < BaseMutation
field :errors, [ErrorType], null: false
def resolve(doi: nil, id: nil, source_id: nil)
- return { claim: nil, errors: [] } if doi.blank? || context[:current_user].blank?
+ if doi.blank? || context[:current_user].blank?
+ return { claim: nil, errors: [] }
+ end
# Use DataCite Claims API call to post claim
data = {
- "claim" => { "uuid" => id || SecureRandom.uuid,
- "orcid" => context[:current_user].uid,
- "doi" => doi,
- "claim_action" => "create",
- "source_id" => source_id || "orcid_update" },
+ "claim" => {
+ "uuid" => id || SecureRandom.uuid,
+ "orcid" => context[:current_user].uid,
+ "doi" => doi,
+ "claim_action" => "create",
+ "source_id" => source_id || "orcid_update",
+ },
}
- api_url = Rails.env.production? ? 
"https://api.datacite.org" : "https://api.stage.datacite.org" + api_url = + if Rails.env.production? + "https://api.datacite.org" + else + "https://api.stage.datacite.org" + end url = "#{api_url}/claims" - response = Maremma.post(url, data: data.to_json, content_type: "application/json;charset=UTF-8", bearer: context[:current_user].jwt) + response = + Maremma.post( + url, + data: data.to_json, + content_type: "application/json;charset=UTF-8", + bearer: context[:current_user].jwt, + ) if response.status == 202 - claim = OpenStruct.new( - id: response.body.dig("data", "id"), - type: "claim", - orcid: response.body.dig("data", "attributes", "orcid"), - source_id: response.body.dig("data", "attributes", "sourceId"), - state: response.body.dig("data", "attributes", "state"), - claim_action: response.body.dig("data", "attributes", "claimAction"), - claimed: response.body.dig("data", "attributes", "claimed"), - error_messages: response.body.dig("data", "attributes", "errorMessages"), - ) + claim = + OpenStruct.new( + id: response.body.dig("data", "id"), + type: "claim", + orcid: response.body.dig("data", "attributes", "orcid"), + source_id: response.body.dig("data", "attributes", "sourceId"), + state: response.body.dig("data", "attributes", "state"), + claim_action: response.body.dig("data", "attributes", "claimAction"), + claimed: response.body.dig("data", "attributes", "claimed"), + error_messages: + response.body.dig("data", "attributes", "errorMessages"), + ) - { - claim: claim, - errors: [], - } + { claim: claim, errors: [] } else - errors = response.body["errors"].present? ? ": " + response.body.dig("errors", 0, "title") : "" - Rails.logger.error "Error creating claim for user #{context[:current_user].uid} and doi #{doi}" + errors - { - claim: nil, - errors: response.body["errors"], - } + errors = + if response.body["errors"].present? 
+ ": " + response.body.dig("errors", 0, "title") + else + "" + end + Rails.logger.error "Error creating claim for user #{ + context[:current_user].uid + } and doi #{doi}" + + errors + { claim: nil, errors: response.body["errors"] } end end end diff --git a/app/graphql/mutations/delete_claim.rb b/app/graphql/mutations/delete_claim.rb index 27f853f16..7019881ff 100644 --- a/app/graphql/mutations/delete_claim.rb +++ b/app/graphql/mutations/delete_claim.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DeleteClaim < BaseMutation argument :id, ID, required: true @@ -5,24 +7,35 @@ class DeleteClaim < BaseMutation field :errors, [ErrorType], null: false def resolve(id: nil) - return { claim: nil, errors: [] } if id.blank? || context[:current_user].blank? + if id.blank? || context[:current_user].blank? + return { claim: nil, errors: [] } + end # Use DataCite Claims API call to delete claim - api_url = Rails.env.production? ? "https://api.datacite.org" : "https://api.stage.datacite.org" + api_url = + if Rails.env.production? + "https://api.datacite.org" + else + "https://api.stage.datacite.org" + end url = "#{api_url}/claims/#{id}" response = Maremma.delete(url, bearer: context[:current_user].jwt) if [200, 204].include?(response.status) - { - message: "Claim #{id} deleted.", - errors: [], - } + { message: "Claim #{id} deleted.", errors: [] } else - errors = response.body["errors"].present? ? ": " + response.body.dig("errors", 0, "title") : "" - Rails.logger.error "Error deleting claim id #{id} for user #{context[:current_user].uid}" + errors + errors = + if response.body["errors"].present? 
+ ": " + response.body.dig("errors", 0, "title") + else + "" + end + Rails.logger.error "Error deleting claim id #{id} for user #{ + context[:current_user].uid + }" + + errors { - message: "Error deleting claim #{id}.", - errors: response.body["errors"], + message: "Error deleting claim #{id}.", errors: response.body["errors"] } end end diff --git a/app/graphql/resolvers/base.rb b/app/graphql/resolvers/base.rb index a714d8d66..ebdffef88 100644 --- a/app/graphql/resolvers/base.rb +++ b/app/graphql/resolvers/base.rb @@ -1,2 +1,3 @@ -class Base < GraphQL::Schema::Resolver -end +# frozen_string_literal: true + +class Base < GraphQL::Schema::Resolver; end diff --git a/app/graphql/resolvers/claims.rb b/app/graphql/resolvers/claims.rb index 8404fef99..a67d6a652 100644 --- a/app/graphql/resolvers/claims.rb +++ b/app/graphql/resolvers/claims.rb @@ -7,23 +7,41 @@ def resolve return [] if context[:current_user].blank? # Use DataCite Claims API call to get all ORCID claims for a given DOI - api_url = Rails.env.production? ? "https://api.datacite.org" : "https://api.stage.datacite.org" - url = "#{api_url}/claims?user-id=#{context[:current_user].uid}&dois=#{object.doi.downcase}" + api_url = + if Rails.env.production? + "https://api.datacite.org" + else + "https://api.stage.datacite.org" + end + url = + "#{api_url}/claims?user-id=#{context[:current_user].uid}&dois=#{ + object.doi.downcase + }" response = Maremma.get(url, bearer: context[:current_user].jwt) if response.status != 200 - Rails.logger.error "Error retrieving claims for user #{context[:current_user].uid} and doi #{object.doi.downcase}: " + response.body["errors"].inspect + Rails.logger.error "Error retrieving claims for user #{ + context[:current_user].uid + } and doi #{object.doi.downcase}: " + + response.body["errors"].inspect return [] end - Rails.logger.info "Claims for user #{context[:current_user].uid} and doi #{object.doi.downcase} retrieved: " + response.body["data"].inspect if response.body["data"].present? 
+ if response.body["data"].present? + Rails.logger.info "Claims for user #{ + context[:current_user].uid + } and doi #{object.doi.downcase} retrieved: " + + response.body["data"].inspect + end Array.wrap(response.body.dig("data")).map do |claim| - { id: claim["id"], + { + id: claim["id"], source_id: claim.dig("attributes", "sourceId"), state: claim.dig("attributes", "state"), claim_action: claim.dig("attributes", "claimAction"), claimed: claim.dig("attributes", "claimed"), - error_messages: claim.dig("attributes", "errorMessages") } + error_messages: claim.dig("attributes", "errorMessages"), + } end end end diff --git a/app/graphql/types/actor_item.rb b/app/graphql/types/actor_item.rb index 67e9ad853..52cd0d5f5 100644 --- a/app/graphql/types/actor_item.rb +++ b/app/graphql/types/actor_item.rb @@ -6,10 +6,14 @@ module ActorItem description "Information about people, research organizations and funders" - field :id, ID, null: false, description: "The persistent identifier for the actor." + field :id, + ID, + null: false, description: "The persistent identifier for the actor." field :type, String, null: false, description: "The type of the actor." field :name, String, null: true, description: "The name of the actor." - field :alternate_name, [String], null: true, description: "An alias for the actor." + field :alternate_name, + [String], + null: true, description: "An alias for the actor." definition_methods do # Determine what object type to use for `object` diff --git a/app/graphql/types/address_type.rb b/app/graphql/types/address_type.rb index c1b0324c3..cd2f7eeff 100644 --- a/app/graphql/types/address_type.rb +++ b/app/graphql/types/address_type.rb @@ -6,7 +6,11 @@ class AddressType < BaseObject field :type, String, null: true, description: "The type." field :street_address, String, null: true, description: "The street address." field :postal_code, String, null: true, description: "The postal code." 
- field :locality, String, null: true, description: "The locality in which the street address is, and which is in the region." + field :locality, + String, + null: true, + description: + "The locality in which the street address is, and which is in the region." field :region, String, null: true, description: "The region." field :country, String, null: true, description: "The country." end diff --git a/app/graphql/types/audiovisual_connection_with_total_type.rb b/app/graphql/types/audiovisual_connection_with_total_type.rb index 0f5c07374..a4cd4a87d 100644 --- a/app/graphql/types/audiovisual_connection_with_total_type.rb +++ b/app/graphql/types/audiovisual_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/base_connection.rb b/app/graphql/types/base_connection.rb index 0d187110d..18189e309 100644 --- a/app/graphql/types/base_connection.rb +++ b/app/graphql/types/base_connection.rb @@ -52,59 +52,70 @@ class BaseConnection < GraphQL::Types::Relay::BaseConnection "ogl-canada-2.0" => "OGL-Canada-2.0", }.freeze - LOWER_BOUND_YEAR = 2010 + LOWER_BOUND_YEAR = 2_010 def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. 
+ match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end def orcid_from_url(url) - if /\A(?:(http|https):\/\/(orcid.org)\/)(.+)\z/.match?(url) + if %r{\A(?:(http|https)://(orcid.org)/)(.+)\z}.match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end def facet_by_year(arr) arr.map do |hsh| - { "id" => hsh["key_as_string"][0..3], + { + "id" => hsh["key_as_string"][0..3], "title" => hsh["key_as_string"][0..3], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_key(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => hsh["key"].titleize, - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_resource_type(arr) arr.map do |hsh| - { "id" => hsh["key"].underscore.dasherize, + { + "id" => hsh["key"].underscore.dasherize, "title" => hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_software(arr) arr.map do |hsh| - { "id" => hsh["key"].parameterize(separator: "_"), + { + "id" => hsh["key"].parameterize(separator: "_"), "title" => hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_license(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => LICENSES[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end @@ -112,34 +123,38 @@ def facet_by_combined_key(arr) arr.map do |hsh| id, title = hsh["key"].split(":", 2) - { "id" => id, - "title" => title, - "count" => hsh["doc_count"] } + { "id" => id, "title" => title, "count" => hsh["doc_count"] } end end def facet_by_region(arr) arr.map do |hsh| - { "id" => hsh["key"].downcase, + { + "id" => hsh["key"].downcase, "title" => REGIONS[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } 
end end def facet_by_fos(arr) arr.map do |hsh| title = hsh["key"].gsub("FOS: ", "") - { "id" => title.parameterize(separator: "_"), + { + "id" => title.parameterize(separator: "_"), "title" => title, - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_registration_agency(arr) arr.map do |hsh| - { "id" => hsh["key"], + { + "id" => hsh["key"], "title" => REGISTRATION_AGENCIES[hsh["key"]] || hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end @@ -147,19 +162,25 @@ def facet_by_registration_agency(arr) def facet_by_range(arr) interval = Date.current.year - LOWER_BOUND_YEAR - arr.select { |a| a["key_as_string"].to_i <= Date.current.year }[0..interval].map do |hsh| - { "id" => hsh["key_as_string"], + arr.select { |a| a["key_as_string"].to_i <= Date.current.year }[0..interval]. + map do |hsh| + { + "id" => hsh["key_as_string"], "title" => hsh["key_as_string"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_language(arr) arr.map do |hsh| la = ISO_639.find_by(code: hsh["key"]) - { "id" => hsh["key"], - "title" => la.present? ? la.english_name.split(/\W+/).first : hsh["key"], - "count" => hsh["doc_count"] } + { + "id" => hsh["key"], + "title" => + la.present? ? 
la.english_name.split(/\W+/).first : hsh["key"], + "count" => hsh["doc_count"], + } end end end diff --git a/app/graphql/types/base_enum.rb b/app/graphql/types/base_enum.rb index 7db5c02fd..3ba82a96c 100644 --- a/app/graphql/types/base_enum.rb +++ b/app/graphql/types/base_enum.rb @@ -1,4 +1,3 @@ # frozen_string_literal: true -class BaseEnum < GraphQL::Schema::Enum -end +class BaseEnum < GraphQL::Schema::Enum; end diff --git a/app/graphql/types/base_input_object.rb b/app/graphql/types/base_input_object.rb index edb53c74f..f070ed11a 100644 --- a/app/graphql/types/base_input_object.rb +++ b/app/graphql/types/base_input_object.rb @@ -1,4 +1,3 @@ # frozen_string_literal: true -class BaseInputObject < GraphQL::Schema::InputObject -end +class BaseInputObject < GraphQL::Schema::InputObject; end diff --git a/app/graphql/types/base_object.rb b/app/graphql/types/base_object.rb index 68d641ab2..1459b4304 100644 --- a/app/graphql/types/base_object.rb +++ b/app/graphql/types/base_object.rb @@ -6,36 +6,41 @@ class BaseObject < GraphQL::Schema::Object field_class BaseField def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. 
+ match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end def orcid_from_url(url) - if /\A(?:(http|https):\/\/(orcid.org)\/)(.+)\z/.match?(url) + if %r{\A(?:(http|https)://(orcid.org)/)(.+)\z}.match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").upcase + uri.path.gsub(%r{^/}, "").upcase end end def ror_id_from_url(url) - Array(/\A(http|https):\/\/(ror\.org\/0\w{6}\d{2})\z/.match(url)).last + Array(%r{\A(http|https)://(ror\.org/0\w{6}\d{2})\z}.match(url)).last end def facet_by_year(arr) arr.map do |hsh| - { "id" => hsh["key_as_string"][0..3], + { + "id" => hsh["key_as_string"][0..3], "title" => hsh["key_as_string"][0..3], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end def facet_by_resource_type(arr) arr.map do |hsh| - { "id" => hsh["key"].underscore.dasherize, + { + "id" => hsh["key"].underscore.dasherize, "title" => hsh["key"], - "count" => hsh["doc_count"] } + "count" => hsh["doc_count"], + } end end diff --git a/app/graphql/types/base_scalar.rb b/app/graphql/types/base_scalar.rb index cd1bdd801..66c4e1344 100644 --- a/app/graphql/types/base_scalar.rb +++ b/app/graphql/types/base_scalar.rb @@ -1,4 +1,3 @@ # frozen_string_literal: true -class BaseScalar < GraphQL::Schema::Scalar -end +class BaseScalar < GraphQL::Schema::Scalar; end diff --git a/app/graphql/types/base_union.rb b/app/graphql/types/base_union.rb index 6986607a7..1f6b1a2e4 100644 --- a/app/graphql/types/base_union.rb +++ b/app/graphql/types/base_union.rb @@ -1,4 +1,3 @@ # frozen_string_literal: true -class BaseUnion < GraphQL::Schema::Union -end +class BaseUnion < GraphQL::Schema::Union; end diff --git a/app/graphql/types/book_chapter_connection_with_total_type.rb b/app/graphql/types/book_chapter_connection_with_total_type.rb index 1ee54682e..947a575e3 100644 --- a/app/graphql/types/book_chapter_connection_with_total_type.rb +++ 
b/app/graphql/types/book_chapter_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/book_connection_with_total_type.rb b/app/graphql/types/book_connection_with_total_type.rb index e11b0697d..aaec34ec2 100644 --- a/app/graphql/types/book_connection_with_total_type.rb +++ b/app/graphql/types/book_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/claim_type.rb b/app/graphql/types/claim_type.rb index 87ece36f3..e36351120 100644 --- a/app/graphql/types/claim_type.rb +++ b/app/graphql/types/claim_type.rb @@ -7,9 +7,15 @@ class ClaimType < BaseObject field :type, String, null: false, description: "The type of the item." field :source_id, String, null: false, description: "The source of the claim." field :state, String, null: false, description: "The state of the claim." - field :claim_action, String, null: false, description: "The action for the claim." - field :claimed, GraphQL::Types::ISO8601DateTime, null: true, description: "Date and time when claim was done." - field :error_messages, [ErrorType], null: true, description: "Optional error messages." + field :claim_action, + String, + null: false, description: "The action for the claim." + field :claimed, + GraphQL::Types::ISO8601DateTime, + null: true, description: "Date and time when claim was done." + field :error_messages, + [ErrorType], + null: true, description: "Optional error messages." 
def type "Claim" diff --git a/app/graphql/types/collection_connection_with_total_type.rb b/app/graphql/types/collection_connection_with_total_type.rb index f151a410c..980dad3e0 100644 --- a/app/graphql/types/collection_connection_with_total_type.rb +++ b/app/graphql/types/collection_connection_with_total_type.rb @@ -20,7 +20,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/conference_paper_connection_with_total_type.rb b/app/graphql/types/conference_paper_connection_with_total_type.rb index 5d26509a3..208eced85 100644 --- a/app/graphql/types/conference_paper_connection_with_total_type.rb +++ b/app/graphql/types/conference_paper_connection_with_total_type.rb @@ -22,7 +22,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/container_type.rb b/app/graphql/types/container_type.rb index 4e6ac6999..9ec18b4fb 100644 --- a/app/graphql/types/container_type.rb +++ b/app/graphql/types/container_type.rb @@ -3,8 +3,14 @@ class ContainerType < BaseObject description "Information about containers for content." - field :identifier_type, String, null: true, hash_key: "identifierType", description: "The type of identifier." - field :identifier, String, null: true, description: "The value of the identifier." + field :identifier_type, + String, + null: true, + hash_key: "identifierType", + description: "The type of identifier." + field :identifier, + String, + null: true, description: "The value of the identifier." field :type, String, null: true, description: "Container type." field :title, String, null: true, description: "Container title." 
field :volume, String, null: true, description: "Volume."
diff --git a/app/graphql/types/contributor_type.rb b/app/graphql/types/contributor_type.rb
index 691b4a774..1b96e45eb 100644
--- a/app/graphql/types/contributor_type.rb
+++ b/app/graphql/types/contributor_type.rb
@@ -5,11 +5,23 @@ class ContributorType < BaseObject
field :id, ID, null: true, description: "The ID of the contributor."
field :type, String, null: false, description: "The type of the item."
- field :contributor_type, String, null: false, description: "The type of the item."
+ field :contributor_type,
+ String,
+ null: false, description: "The type of the item."
field :name, String, null: true, description: "The name of the contributor."
- field :given_name, String, null: true, description: "Given name. In the U.S., the first name of a person."
- field :family_name, String, null: true, description: "Family name. In the U.S., the last name of an person."
- field :affiliation, [AffiliationType], null: true, description: "The organizational or institutional affiliation of the contributor."
+ field :given_name,
+ String,
+ null: true,
+ description: "Given name. In the U.S., the first name of a person."
+ field :family_name,
+ String,
+ null: true,
+ description: "Family name. In the U.S., the last name of a person."
+ field :affiliation,
+ [AffiliationType],
+ null: true,
+ description:
+ "The organizational or institutional affiliation of the contributor."
def type
case object.name_type
diff --git a/app/graphql/types/creator_type.rb b/app/graphql/types/creator_type.rb
index df7833835..bee7e3a61 100644
--- a/app/graphql/types/creator_type.rb
+++ b/app/graphql/types/creator_type.rb
@@ -6,9 +6,19 @@ class CreatorType < BaseObject
field :id, ID, null: true, description: "The ID of the creator."
field :type, String, null: false, description: "The type of the item."
field :name, String, null: true, description: "The name of the creator."
- field :given_name, String, null: true, description: "Given name. 
In the U.S., the first name of a Person."
- field :family_name, String, null: true, description: "Family name. In the U.S., the last name of an Person."
- field :affiliation, [AffiliationType], null: true, description: "The organizational or institutional affiliation of the creator."
+ field :given_name,
+ String,
+ null: true,
+ description: "Given name. In the U.S., the first name of a Person."
+ field :family_name,
+ String,
+ null: true,
+ description: "Family name. In the U.S., the last name of a Person."
+ field :affiliation,
+ [AffiliationType],
+ null: true,
+ description:
+ "The organizational or institutional affiliation of the creator."
def type
case object.name_type
diff --git a/app/graphql/types/data_catalog_connection_with_total_type.rb b/app/graphql/types/data_catalog_connection_with_total_type.rb
index 6a0fc4653..6bca047a4 100644
--- a/app/graphql/types/data_catalog_connection_with_total_type.rb
+++ b/app/graphql/types/data_catalog_connection_with_total_type.rb
@@ -12,6 +12,8 @@ def total_count
end
def dataset_connection_count
- @dataset_connection_count ||= Doi.gql_query("client.re3data_id:*", page: { number: 1, size: 0 }).results.total
+ @dataset_connection_count ||=
+ Doi.gql_query("client.re3data_id:*", page: { number: 1, size: 0 }).results.
+ total
end
end
diff --git a/app/graphql/types/data_catalog_type.rb b/app/graphql/types/data_catalog_type.rb
index a84739388..8eb077df6 100644
--- a/app/graphql/types/data_catalog_type.rb
+++ b/app/graphql/types/data_catalog_type.rb
@@ -7,27 +7,66 @@ class DataCatalogType < BaseObject
field :type, String, null: false, description: "The type of the item."
field :identifier, [IdentifierType], null: true, description: "re3data ID"
field :name, String, null: true, description: "The name of the data catalog."
- field :alternate_name, [String], null: true, description: "An alias for the data catalog."
- field :url, String, null: true, hash_key: "repositoryUrl", description: "URL of the data catalog." 
- field :contacts, [String], null: true, description: "Repository contact information" - field :description, String, null: true, description: "A description of the data catalog." - field :certificates, [DefinedTermType], null: true, description: "Certificates of the data catalog." - field :subjects, [DefinedTermType], null: true, description: "Subject areas covered by the data catalog." + field :alternate_name, + [String], + null: true, description: "An alias for the data catalog." + field :url, + String, + null: true, + hash_key: "repositoryUrl", + description: "URL of the data catalog." + field :contacts, + [String], + null: true, description: "Repository contact information" + field :description, + String, + null: true, description: "A description of the data catalog." + field :certificates, + [DefinedTermType], + null: true, description: "Certificates of the data catalog." + field :subjects, + [DefinedTermType], + null: true, description: "Subject areas covered by the data catalog." # field :types, [String], null: true, description: "Repository types" # field :content_types, [SchemeType], null: true, description: "Content types" field :provider_types, [String], null: true, description: "Provider types" - field :in_language, [String], null: true, description: "The language of the content of the data catalog." - field :keywords, String, null: true, description: "Keywords or tags used to describe this data catalog. Multiple entries in a keywords list are typically delimited by commas." - field :data_accesses, [TextRestrictionType], null: true, description: "Data accesses" - field :data_uploads, [TextRestrictionType], null: true, description: "Data uploads" + field :in_language, + [String], + null: true, + description: "The language of the content of the data catalog." + field :keywords, + String, + null: true, + description: + "Keywords or tags used to describe this data catalog. Multiple entries in a keywords list are typically delimited by commas." 
+ field :data_accesses, + [TextRestrictionType], + null: true, description: "Data accesses" + field :data_uploads, + [TextRestrictionType], + null: true, description: "Data uploads" field :pid_systems, [String], null: true, description: "PID Systems" # field :apis, [ApiType], null: true, description: "APIs" - field :software_application, [SoftwareApplicationType], null: true, description: "Software" - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." - - field :datasets, DatasetConnectionWithTotalType, null: true, description: "Funded datasets" do + field :software_application, + [SoftwareApplicationType], + null: true, description: "Software" + field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." + field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." 
+ + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Funded datasets" do argument :query, String, required: false argument :user_id, String, required: false argument :published, String, required: false @@ -45,7 +84,9 @@ class DataCatalogType < BaseObject end def identifier - Array.wrap(object.re3data_id).map { |o| { "name" => "re3data", "value" => "r3d#{o}" } } + Array.wrap(object.re3data_id).map do |o| + { "name" => "re3data", "value" => "r3d#{o}" } + end end def alternate_name @@ -69,9 +110,7 @@ def subjects term_code, name = s["text"].split(" ", 2) { - "term_code" => term_code, - "name" => name, - "in_defined_term_set" => "DFG", + "term_code" => term_code, "name" => name, "in_defined_term_set" => "DFG" } end end @@ -81,22 +120,61 @@ def software_application end def datasets(**args) - Doi.gql_query(args[:query], re3data_id: object[:id], user_id: args[:user_id], client_id: args[:repository_id], resource_type: args[:resource_type], provider_id: args[:member_id], license: args[:license], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], resource_type_id: "Dataset", published: args[:published], state: "findable", page: { cursor: args[:cursor].present? ? Base64.urlsafe_decode64(args[:cursor]) : nil, size: args[:size] }) + Doi.gql_query( + args[:query], + re3data_id: object[:id], + user_id: args[:user_id], + client_id: args[:repository_id], + resource_type: args[:resource_type], + provider_id: args[:member_id], + license: args[:license], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + resource_type_id: "Dataset", + published: args[:published], + state: "findable", + page: { + cursor: + args[:cursor].present? ? 
Base64.urlsafe_decode64(args[:cursor]) : nil, + size: args[:size], + }, + ) end def view_count - response.results.total.positive? ? aggregate_count(response.response.aggregations.views.buckets) : 0 + if response.results.total.positive? + aggregate_count(response.response.aggregations.views.buckets) + else + 0 + end end def download_count - response.results.total.positive? ? aggregate_count(response.response.aggregations.downloads.buckets) : 0 + if response.results.total.positive? + aggregate_count(response.response.aggregations.downloads.buckets) + else + 0 + end end def citation_count - response.results.total.positive? ? aggregate_count(response.response.aggregations.citations.buckets) : 0 + if response.results.total.positive? + aggregate_count(response.response.aggregations.citations.buckets) + else + 0 + end end def response - @response ||= Doi.gql_query(nil, re3data_id: object[:id], state: "findable", page: { number: 1, size: 0 }) + @response ||= + Doi.gql_query( + nil, + re3data_id: object[:id], + state: "findable", + page: { number: 1, size: 0 }, + ) end end diff --git a/app/graphql/types/data_management_plan_connection_with_total_type.rb b/app/graphql/types/data_management_plan_connection_with_total_type.rb index 5b8f9cbd4..9f022bf45 100644 --- a/app/graphql/types/data_management_plan_connection_with_total_type.rb +++ b/app/graphql/types/data_management_plan_connection_with_total_type.rb @@ -22,7 +22,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/data_paper_connection_with_total_type.rb b/app/graphql/types/data_paper_connection_with_total_type.rb index 4e3911d41..4a85530d3 100644 --- a/app/graphql/types/data_paper_connection_with_total_type.rb +++ b/app/graphql/types/data_paper_connection_with_total_type.rb @@ -21,7 +21,9 @@ def 
published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/dataset_connection_with_total_type.rb b/app/graphql/types/dataset_connection_with_total_type.rb index caf276427..1036075d8 100644 --- a/app/graphql/types/dataset_connection_with_total_type.rb +++ b/app/graphql/types/dataset_connection_with_total_type.rb @@ -29,7 +29,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories @@ -53,26 +55,63 @@ def languages end def dataset_connection_count - @dataset_connection_count ||= Event.query(nil, citation_type: "Dataset-Dataset", page: { number: 1, size: 0 }).results.total + @dataset_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-Dataset", page: { number: 1, size: 0 }, + ). + results. + total end def publication_connection_count - @publication_connection_count ||= Event.query(nil, citation_type: "Dataset-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @publication_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-ScholarlyArticle", page: { number: 1, size: 0 }, + ). + results. + total end def software_connection_count - @software_connection_count ||= Event.query(nil, citation_type: "Dataset-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + @software_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-SoftwareSourceCode", + page: { number: 1, size: 0 }, + ). + results. 
+ total end def person_connection_count - @person_connection_count ||= Event.query(nil, citation_type: "Dataset-Person", page: { number: 1, size: 0 }).results.total + @person_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-Person", page: { number: 1, size: 0 }, + ). + results. + total end def funder_connection_count - @funder_connection_count ||= Event.query(nil, citation_type: "Dataset-Funder", page: { number: 1, size: 0 }).results.total + @funder_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-Funder", page: { number: 1, size: 0 }, + ). + results. + total end def organization_connection_count - @organization_connection_count ||= Event.query(nil, citation_type: "Dataset-Organization", page: { number: 1, size: 0 }).results.total + @organization_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-Organization", page: { number: 1, size: 0 }, + ). + results. + total end end diff --git a/app/graphql/types/dataset_type.rb b/app/graphql/types/dataset_type.rb index f41e54819..a0af13e2e 100644 --- a/app/graphql/types/dataset_type.rb +++ b/app/graphql/types/dataset_type.rb @@ -3,14 +3,20 @@ class DatasetType < BaseObject implements DoiItem - field :usage_reports, DatasetUsageReportConnectionWithTotalType, null: false, description: "Usage reports for this dataset", connection: true do + field :usage_reports, + DatasetUsageReportConnectionWithTotalType, + null: false, + description: "Usage reports for this dataset", + connection: true do argument :first, Int, required: false, default_value: 25 end def usage_reports(**args) - ids = Event.query(nil, obj_id: object.id).results.to_a.map do |e| - e[:subj_id] - end - UsageReport.find_by_id(ids, page: { number: 1, size: args[:first] }).fetch(:data, []) + ids = + Event.query(nil, obj_id: object.id).results.to_a.map { |e| e[:subj_id] } + UsageReport.find_by_id(ids, page: { number: 1, size: args[:first] }).fetch( + :data, + [], + ) end end diff --git 
a/app/graphql/types/date_type.rb b/app/graphql/types/date_type.rb index b7603322c..16dabf027 100644 --- a/app/graphql/types/date_type.rb +++ b/app/graphql/types/date_type.rb @@ -3,8 +3,12 @@ class DateType < BaseObject description "Information about dates" - field :date, String, null: false, description: "Date information for this resource" - field :date_type, String, null: true, hash_key: "dateType", description: "The type of date" + field :date, + String, + null: false, description: "Date information for this resource" + field :date_type, + String, + null: true, hash_key: "dateType", description: "The type of date" end # Acceptable values for date_type are from the DataCite Metadata Schema: diff --git a/app/graphql/types/defined_term_type.rb b/app/graphql/types/defined_term_type.rb index 61fe0b937..2ceb46a20 100644 --- a/app/graphql/types/defined_term_type.rb +++ b/app/graphql/types/defined_term_type.rb @@ -3,8 +3,16 @@ class DefinedTermType < BaseObject description "A word, name, acronym, phrase, etc. with a formal definition. Often used in the context of category or subject classification, glossaries or dictionaries, product or creative work types, etc." - field :term_code, String, null: true, description: "A code that identifies this DefinedTerm within a DefinedTermSet." + field :term_code, + String, + null: true, + description: + "A code that identifies this DefinedTerm within a DefinedTermSet." field :name, String, null: true, description: "The name of the item." - field :description, String, null: true, description: "A description of the item." - field :in_defined_term_set, String, null: true, description: "A DefinedTermSet that contains this term." + field :description, + String, + null: true, description: "A description of the item." + field :in_defined_term_set, + String, + null: true, description: "A DefinedTermSet that contains this term." 
end diff --git a/app/graphql/types/description_type.rb b/app/graphql/types/description_type.rb index ef3f66de1..c7f22e0b0 100644 --- a/app/graphql/types/description_type.rb +++ b/app/graphql/types/description_type.rb @@ -4,6 +4,8 @@ class DescriptionType < BaseObject description "Information about descriptions" field :description, String, null: true, description: "Description" - field :description_type, String, null: true, hash_key: "descriptionType", description: "Description type" + field :description_type, + String, + null: true, hash_key: "descriptionType", description: "Description type" field :lang, ID, null: true, description: "Language" end diff --git a/app/graphql/types/dissertation_connection_with_total_type.rb b/app/graphql/types/dissertation_connection_with_total_type.rb index 9f6159395..0b2a4f1c2 100644 --- a/app/graphql/types/dissertation_connection_with_total_type.rb +++ b/app/graphql/types/dissertation_connection_with_total_type.rb @@ -22,7 +22,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/doi_item.rb b/app/graphql/types/doi_item.rb index da5e2e92e..f04d9b9af 100644 --- a/app/graphql/types/doi_item.rb +++ b/app/graphql/types/doi_item.rb @@ -18,65 +18,187 @@ module DoiItem description "Information about DOIs" - field :id, ID, null: false, hash_key: "identifier", description: "The persistent identifier for the resource" + field :id, + ID, + null: false, + hash_key: "identifier", + description: "The persistent identifier for the resource" field :type, String, null: false, description: "The type of the item." - field :doi, String, null: false, hash_key: "uid", description: "The DOI for the resource." 
- field :creators, [CreatorType], null: true, description: "The main researchers involved in producing the data, or the authors of the publication, in priority order." do + field :doi, + String, + null: false, hash_key: "uid", description: "The DOI for the resource." + field :creators, + [CreatorType], + null: true, + description: + "The main researchers involved in producing the data, or the authors of the publication, in priority order." do argument :first, Int, required: false, default_value: 20 end - field :contributors, [ContributorType], null: true, description: "The institution or person responsible for collecting, managing, distributing, or otherwise contributing to the development of the resource." do + field :contributors, + [ContributorType], + null: true, + description: + "The institution or person responsible for collecting, managing, distributing, or otherwise contributing to the development of the resource." do argument :first, Int, required: false, default_value: 20 argument :contributor_type, String, required: false end - field :titles, [TitleType], null: true, description: "A name or title by which a resource is known." do + field :titles, + [TitleType], + null: true, + description: "A name or title by which a resource is known." 
do argument :first, Int, required: false, default_value: 5 end - field :publication_year, Int, null: true, description: "The year when the data was or will be made publicly available" - field :publisher, String, null: true, description: "The name of the entity that holds, archives, publishes prints, distributes, releases, issues, or produces the resource" - field :subjects, [SubjectType], null: true, description: "Subject, keyword, classification code, or key phrase describing the resource" - field :fields_of_science, [FieldOfScienceType], null: true, description: "OECD Fields of Science of the resource" - field :dates, [DateType], null: true, description: "Different dates relevant to the work" - field :registered, GraphQL::Types::ISO8601DateTime, null: true, description: "DOI registration date" - field :language, LanguageType, null: true, description: "The primary language of the resource" - field :identifiers, [IdentifierType], null: true, description: "An identifier or identifiers applied to the resource being registered" - field :related_identifiers, [RelatedIdentifierType], null: true, description: "Identifiers of related resources. 
These must be globally unique identifiers" + field :publication_year, + Int, + null: true, + description: + "The year when the data was or will be made publicly available" + field :publisher, + String, + null: true, + description: + "The name of the entity that holds, archives, publishes prints, distributes, releases, issues, or produces the resource" + field :subjects, + [SubjectType], + null: true, + description: + "Subject, keyword, classification code, or key phrase describing the resource" + field :fields_of_science, + [FieldOfScienceType], + null: true, description: "OECD Fields of Science of the resource" + field :dates, + [DateType], + null: true, description: "Different dates relevant to the work" + field :registered, + GraphQL::Types::ISO8601DateTime, + null: true, description: "DOI registration date" + field :language, + LanguageType, + null: true, description: "The primary language of the resource" + field :identifiers, + [IdentifierType], + null: true, + description: + "An identifier or identifiers applied to the resource being registered" + field :related_identifiers, + [RelatedIdentifierType], + null: true, + description: + "Identifiers of related resources. These must be globally unique identifiers" field :types, ResourceTypeType, null: false, description: "The resource type" - field :formats, [String], null: true, description: "Technical format of the resource" - field :sizes, [String], null: true, description: "Size (e.g. bytes, pages, inches, etc.) or duration (extent), e.g. 
hours, minutes, days, etc., of a resource" - field :version, String, null: true, hash_key: "version_info", description: "The version number of the resource" - field :rights, [RightsType], null: true, description: "Any rights information for this resource" - field :descriptions, [DescriptionType], null: true, description: "All additional information that does not fit in any of the other categories" do + field :formats, + [String], + null: true, description: "Technical format of the resource" + field :sizes, + [String], + null: true, + description: + "Size (e.g. bytes, pages, inches, etc.) or duration (extent), e.g. hours, minutes, days, etc., of a resource" + field :version, + String, + null: true, + hash_key: "version_info", + description: "The version number of the resource" + field :rights, + [RightsType], + null: true, description: "Any rights information for this resource" + field :descriptions, + [DescriptionType], + null: true, + description: + "All additional information that does not fit in any of the other categories" do argument :first, Int, required: false, default_value: 5 end - field :container, ContainerType, null: true, description: "The container (e.g. journal or repository) hosting the resource." - field :geolocations, [GeolocationType], null: true, hash_key: "geo_locations", description: "Spatial region or named place where the data was gathered or about which the data is focused." 
- field :funding_references, [FundingType], null: true, description: "Information about financial support (funding) for the resource being registered" - field :url, Url, null: true, description: "The URL registered for the resource" - field :content_url, resolver: ContentUrl, null: true, description: "Url to download the content directly, if available" - field :repository, RepositoryType, null: true, hash_key: "client", description: "The repository account managing this resource" - field :member, MemberType, null: true, hash_key: "provider", description: "The member account managing this resource" - field :registration_agency, RegistrationAgencyType, hash_key: "agency", null: true, description: "The DOI registration agency for the resource" - field :formatted_citation, String, null: true, description: "Metadata as formatted citation" do + field :container, + ContainerType, + null: true, + description: + "The container (e.g. journal or repository) hosting the resource." + field :geolocations, + [GeolocationType], + null: true, + hash_key: "geo_locations", + description: + "Spatial region or named place where the data was gathered or about which the data is focused." 
+ field :funding_references, + [FundingType], + null: true, + description: + "Information about financial support (funding) for the resource being registered" + field :url, + Url, + null: true, description: "The URL registered for the resource" + field :content_url, + resolver: ContentUrl, + null: true, + description: "Url to download the content directly, if available" + field :repository, + RepositoryType, + null: true, + hash_key: "client", + description: "The repository account managing this resource" + field :member, + MemberType, + null: true, + hash_key: "provider", + description: "The member account managing this resource" + field :registration_agency, + RegistrationAgencyType, + hash_key: "agency", + null: true, + description: "The DOI registration agency for the resource" + field :formatted_citation, + String, + null: true, description: "Metadata as formatted citation" do argument :style, String, required: false, default_value: "apa" argument :locale, String, required: false, default_value: "en-US" end - field :xml, String, null: false, description: "Metadata in DataCite XML format." + field :xml, + String, + null: false, description: "Metadata in DataCite XML format." field :bibtex, String, null: false, description: "Metadata in bibtex format" - field :schema_org, GraphQL::Types::JSON, null: false, description: "Metadata in schema.org format" - field :claims, resolver: Claims, null: true, description: "Claims to ORCID made for this DOI." - field :reference_count, Int, null: true, description: "Total number of references" - field :citation_count, Int, null: true, description: "Total number of citations" + field :schema_org, + GraphQL::Types::JSON, + null: false, description: "Metadata in schema.org format" + field :claims, + resolver: Claims, + null: true, + description: "Claims to ORCID made for this DOI." 
+ field :reference_count, + Int, + null: true, description: "Total number of references" + field :citation_count, + Int, + null: true, description: "Total number of citations" field :view_count, Int, null: true, description: "Total number of views" - field :download_count, Int, null: true, description: "Total number of downloads" + field :download_count, + Int, + null: true, description: "Total number of downloads" field :version_count, Int, null: true, description: "Total number of versions" - field :version_of_count, Int, null: true, description: "Total number of DOIs the resource is a version of" + field :version_of_count, + Int, + null: true, + description: "Total number of DOIs the resource is a version of" field :part_count, Int, null: true, description: "Total number of parts" - field :part_of_count, Int, null: true, description: "Total number of DOIs the resource is a part of" - field :citations_over_time, [YearTotalType], null: true, description: "Citations by year" - field :views_over_time, [YearMonthTotalType], null: true, description: "Views by month" - field :downloads_over_time, [YearMonthTotalType], null: true, description: "Downloads by month" - - field :references, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "References for this DOI" do + field :part_of_count, + Int, + null: true, + description: "Total number of DOIs the resource is a part of" + field :citations_over_time, + [YearTotalType], + null: true, description: "Citations by year" + field :views_over_time, + [YearMonthTotalType], + null: true, description: "Views by month" + field :downloads_over_time, + [YearMonthTotalType], + null: true, description: "Downloads by month" + + field :references, + WorkConnectionWithTotalType, + null: true, + max_page_size: 100, + description: "References for this DOI" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -104,7 +226,11 @@ module 
DoiItem argument :after, String, required: false end - field :citations, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "Citations for this DOI." do + field :citations, + WorkConnectionWithTotalType, + null: true, + max_page_size: 100, + description: "Citations for this DOI." do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -132,7 +258,9 @@ module DoiItem argument :after, String, required: false end - field :parts, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "Parts of this DOI." do + field :parts, + WorkConnectionWithTotalType, + null: true, max_page_size: 100, description: "Parts of this DOI." do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -160,7 +288,11 @@ module DoiItem argument :after, String, required: false end - field :part_of, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "The DOI is a part of this DOI." do + field :part_of, + WorkConnectionWithTotalType, + null: true, + max_page_size: 100, + description: "The DOI is a part of this DOI." do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -188,7 +320,9 @@ module DoiItem argument :after, String, required: false end - field :versions, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "Versions of this DOI." do + field :versions, + WorkConnectionWithTotalType, + null: true, max_page_size: 100, description: "Versions of this DOI." 
do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -216,7 +350,11 @@ module DoiItem argument :after, String, required: false end - field :version_of, WorkConnectionWithTotalType, null: true, max_page_size: 100, description: "The DOI is a version of this DOI." do + field :version_of, + WorkConnectionWithTotalType, + null: true, + max_page_size: 100, + description: "The DOI is a version of this DOI." do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -266,55 +404,60 @@ def language def registration_agency return {} if object.agency.blank? - { - id: object.agency, - name: REGISTRATION_AGENCIES[object.agency], - }.compact + { id: object.agency, name: REGISTRATION_AGENCIES[object.agency] }.compact end def fields_of_science - Array.wrap(object.subjects). - select { |s| s["subjectScheme"] == "Fields of Science and Technology (FOS)" }. - map do |s| - name = s["subject"].gsub("FOS: ", "") - { - "id" => name.parameterize(separator: "_"), - "name" => name, - } - end.uniq + Array.wrap(object.subjects).select do |s| + s["subjectScheme"] == "Fields of Science and Technology (FOS)" + end.map do |s| + name = s["subject"].gsub("FOS: ", "") + { "id" => name.parameterize(separator: "_"), "name" => name } + end.uniq end def creators(**args) Array.wrap(object.creators)[0...args[:first]].map do |c| Hashie::Mash.new( - "id" => c.fetch("nameIdentifiers", []).detect { |n| %w(ORCID ROR).include?(n.fetch("nameIdentifierScheme", nil)) }.to_h.fetch("nameIdentifier", nil), + "id" => + c.fetch("nameIdentifiers", []).detect do |n| + %w[ORCID ROR].include?(n.fetch("nameIdentifierScheme", nil)) + end.to_h. 
+ fetch("nameIdentifier", nil), "name_type" => c.fetch("nameType", nil), "name" => c.fetch("name", nil), "given_name" => c.fetch("givenName", nil), "family_name" => c.fetch("familyName", nil), - "affiliation" => c.fetch("affiliation", []).map do |a| - { "id" => a["affiliationIdentifier"], - "name" => a["name"] }.compact - end, + "affiliation" => + c.fetch("affiliation", []).map do |a| + { "id" => a["affiliationIdentifier"], "name" => a["name"] }.compact + end, ) end end def contributors(**args) contrib = Array.wrap(object.contributors)[0...args[:first]] - contrib = contrib.select { |c| c["contributorType"] == args[:contributor_type] } if args[:contributor_type].present? + if args[:contributor_type].present? + contrib = + contrib.select { |c| c["contributorType"] == args[:contributor_type] } + end contrib.map do |c| Hashie::Mash.new( - "id" => c.fetch("nameIdentifiers", []).detect { |n| %w(ORCID ROR).include?(n.fetch("nameIdentifierScheme", nil)) }.to_h.fetch("nameIdentifier", nil), + "id" => + c.fetch("nameIdentifiers", []).detect do |n| + %w[ORCID ROR].include?(n.fetch("nameIdentifierScheme", nil)) + end.to_h. 
+ fetch("nameIdentifier", nil), "contributor_type" => c.fetch("contributorType", nil), "name_type" => c.fetch("nameType", nil), "name" => c.fetch("name", nil), "given_name" => c.fetch("givenName", nil), "family_name" => c.fetch("familyName", nil), - "affiliation" => c.fetch("affiliation", []).map do |a| - { "id" => a["affiliationIdentifier"], - "name" => a["name"] }.compact - end, + "affiliation" => + c.fetch("affiliation", []).map do |a| + { "id" => a["affiliationIdentifier"], "name" => a["name"] }.compact + end, ) end end @@ -328,11 +471,17 @@ def descriptions(first: nil) end def identifiers - Array.wrap(object.identifiers).select { |r| [doi_from_url(object.doi), object.url].compact.exclude?(r["identifier"]) } + Array.wrap(object.identifiers).select do |r| + [doi_from_url(object.doi), object.url].compact.exclude?(r["identifier"]) + end end def bibtex - pages = object.container.to_h["firstPage"].present? ? [object.container["firstPage"], object.container["lastPage"]].compact.join("-") : nil + pages = + if object.container.to_h["firstPage"].present? + [object.container["firstPage"], object.container["lastPage"]].compact. + join("-") + end bib = { bibtex_type: object.types["bibtex"].presence || "misc", @@ -340,7 +489,12 @@ def bibtex doi: object.doi, url: object.url, author: authors_as_string(object.creators), - keywords: object.subjects.present? ? Array.wrap(object.subjects).map { |k| parse_attributes(k, content: "subject", first: true) }.join(", ") : nil, + keywords: + if object.subjects.present? + Array.wrap(object.subjects).map do |k| + parse_attributes(k, content: "subject", first: true) + end.join(", ") + end, language: object.language, title: parse_attributes(object.titles, content: "title", first: true), journal: object.container && object.container["title"], @@ -364,14 +518,26 @@ def schema_org "@id" => normalize_doi(object.doi), "identifier" => to_schema_org_identifiers(object.identifiers), "url" => object.url, - "additionalType" => object.types.present? ? 
object.types["resourceType"] : nil, + "additionalType" => + object.types.present? ? object.types["resourceType"] : nil, "name" => parse_attributes(object.titles, content: "title", first: true), "author" => to_schema_org_creators(object.creators), "editor" => to_schema_org_contributors(object.contributors), - "description" => parse_attributes(object.descriptions, content: "description", first: true), - "license" => Array.wrap(object.rights_list).map { |l| l["rightsUri"] }.compact.unwrap, + "description" => + parse_attributes( + object.descriptions, + content: "description", first: true, + ), + "license" => + Array.wrap(object.rights_list).map { |l| l["rightsUri"] }.compact. + unwrap, "version" => object.version_info, - "keywords" => object.subjects.present? ? Array.wrap(object.subjects).map { |k| parse_attributes(k, content: "subject", first: true) }.join(", ") : nil, + "keywords" => + if object.subjects.present? + Array.wrap(object.subjects).map do |k| + parse_attributes(k, content: "subject", first: true) + end.join(", ") + end, "inLanguage" => object.language, "contentSize" => Array.wrap(object.sizes).unwrap, "encodingFormat" => Array.wrap(object.formats).unwrap, @@ -381,45 +547,108 @@ def schema_org "pageStart" => object.container.to_h["firstPage"], "pageEnd" => object.container.to_h["lastPage"], "spatialCoverage" => to_schema_org_spatial_coverage(object.geo_locations), - "sameAs" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: "IsIdenticalTo"), - "isPartOf" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: "IsPartOf"), - "hasPart" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: "HasPart"), - "predecessor_of" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: "IsPreviousVersionOf"), - "successor_of" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: 
"IsNewVersionOf"), - "citation" => to_schema_org_relation(related_identifiers: object.related_identifiers, relation_type: "References"), + "sameAs" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "IsIdenticalTo", + ), + "isPartOf" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "IsPartOf", + ), + "hasPart" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "HasPart", + ), + "predecessor_of" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "IsPreviousVersionOf", + ), + "successor_of" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "IsNewVersionOf", + ), + "citation" => + to_schema_org_relation( + related_identifiers: object.related_identifiers, + relation_type: "References", + ), "@reverse" => reverse.presence, "contentUrl" => Array.wrap(object.content_url).unwrap, "schemaVersion" => object.schema_version, - "periodical" => object.types.present? ? ((object.types["schemaOrg"] != "Dataset") && object.container.present? ? to_schema_org(object.container) : nil) : nil, - "includedInDataCatalog" => object.types.present? ? ((object.types["schemaOrg"] == "Dataset") && object.container.present? ? to_schema_org_container(object.container, type: "Dataset") : nil) : nil, - "publisher" => object.publisher.present? ? { "@type" => "Organization", "name" => object.publisher } : nil, + "periodical" => + if object.types.present? + ( + if (object.types["schemaOrg"] != "Dataset") && + object.container.present? + to_schema_org(object.container) + end + ) + end, + "includedInDataCatalog" => + if object.types.present? + ( + if (object.types["schemaOrg"] == "Dataset") && + object.container.present? + to_schema_org_container(object.container, type: "Dataset") + end + ) + end, + "publisher" => + if object.publisher.present? 
+ { "@type" => "Organization", "name" => object.publisher } + end, "funder" => to_schema_org_funder(object.funding_references), - "provider" => object.agency.present? ? { "@type" => "Organization", "name" => object.agency } : nil, + "provider" => + if object.agency.present? + { "@type" => "Organization", "name" => object.agency } + end, }.compact JSON.pretty_generate hsh end def reverse - { "citation" => Array.wrap(object.related_identifiers).select { |ri| ri["relationType"] == "IsReferencedBy" }.map do |r| - { "@id" => normalize_doi(r["relatedIdentifier"]), - "@type" => r["resourceTypeGeneral"] || "ScholarlyArticle", - "identifier" => r["relatedIdentifierType"] == "DOI" ? nil : to_identifier(r) }.compact - end.unwrap, - "isBasedOn" => Array.wrap(object.related_identifiers).select { |ri| ri["relationType"] == "IsSupplementTo" }.map do |r| - { "@id" => normalize_doi(r["relatedIdentifier"]), - "@type" => r["resourceTypeGeneral"] || "ScholarlyArticle", - "identifier" => r["relatedIdentifierType"] == "DOI" ? nil : to_identifier(r) }.compact - end.unwrap }.compact + { + "citation" => + Array.wrap(object.related_identifiers).select do |ri| + ri["relationType"] == "IsReferencedBy" + end.map do |r| + { + "@id" => normalize_doi(r["relatedIdentifier"]), + "@type" => r["resourceTypeGeneral"] || "ScholarlyArticle", + "identifier" => + r["relatedIdentifierType"] == "DOI" ? nil : to_identifier(r), + }.compact + end.unwrap, + "isBasedOn" => + Array.wrap(object.related_identifiers).select do |ri| + ri["relationType"] == "IsSupplementTo" + end.map do |r| + { + "@id" => normalize_doi(r["relatedIdentifier"]), + "@type" => r["resourceTypeGeneral"] || "ScholarlyArticle", + "identifier" => + r["relatedIdentifierType"] == "DOI" ? 
nil : to_identifier(r), + }.compact + end.unwrap, + }.compact end # defaults to style: apa and locale: en-US def formatted_citation(style: nil, locale: nil) - cp = CiteProc::Processor.new(style: style || "apa", locale: locale || "en-US", format: "html") + cp = + CiteProc::Processor.new( + style: style || "apa", locale: locale || "en-US", format: "html", + ) cp.import Array.wrap(citeproc_hsh) bibliography = cp.render :bibliography, id: normalize_doi(object.doi) url = object.doi - unless /^https?:\/\//i.match?(object.doi) + unless %r{^https?://}i.match?(object.doi) url = "https://doi.org/#{object.doi}" end bibliography.first.gsub(url, doi_link(url)) @@ -427,39 +656,87 @@ def formatted_citation(style: nil, locale: nil) def references(**args) args[:ids] = object.reference_ids - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def citations(**args) args[:ids] = object.citation_ids - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def parts(**args) args[:ids] = object.part_ids - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def part_of(**args) args[:ids] = object.part_of_ids - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def versions(**args) args[:ids] = object.version_ids - 
ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def version_of(**args) args[:ids] = object.version_of_ids - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def response(**args) # make sure no dois are returnded if there are no :ids args[:ids] = "999" if args[:ids].blank? - Doi.gql_query(args[:query], ids: args[:ids], user_id: args[:user_id], client_id: args[:repository_id], provider_id: args[:member_id], resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], published: args[:published], agency: args[:registration_agency], language: args[:language], license: args[:license], has_person: args[:has_person], has_funder: args[:has_funder], has_organization: args[:has_organization], has_affiliation: args[:has_affiliation], has_member: args[:has_member], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], pid_entity: args[:pid_entity], state: "findable", page: { cursor: args[:after].present? ? 
Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + ids: args[:ids], + user_id: args[:user_id], + client_id: args[:repository_id], + provider_id: args[:member_id], + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + published: args[:published], + agency: args[:registration_agency], + language: args[:language], + license: args[:license], + has_person: args[:has_person], + has_funder: args[:has_funder], + has_organization: args[:has_organization], + has_affiliation: args[:has_affiliation], + has_member: args[:has_member], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + pid_entity: args[:pid_entity], + state: "findable", + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end def doi_link(url) @@ -467,29 +744,49 @@ def doi_link(url) end def citeproc_hsh - page = object.container.to_h["firstPage"].present? ? [object.container["firstPage"], object.container["lastPage"]].compact.join("-") : nil - if Array.wrap(object.creators).size == 1 && Array.wrap(object.creators).first.fetch("name", nil) == ":(unav)" + page = + if object.container.to_h["firstPage"].present? + [object.container["firstPage"], object.container["lastPage"]].compact. + join("-") + end + if Array.wrap(object.creators).size == 1 && + Array.wrap(object.creators).first.fetch("name", nil) == ":(unav)" author = nil else author = to_citeproc(object.creators) end - if object.types["resourceTypeGeneral"] == "Software" && object.version_info.present? - citeproc_type = "book" + citeproc_type = if object.types["resourceTypeGeneral"] == "Software" && + object.version_info.present? 
+ "book" else - citeproc_type = object.types["citeproc"] + object.types["citeproc"] end { "type" => citeproc_type, "id" => normalize_doi(object.doi), - "categories" => Array.wrap(object.subjects).map { |k| parse_attributes(k, content: "subject", first: true) }.presence, + "categories" => + Array.wrap(object.subjects).map do |k| + parse_attributes(k, content: "subject", first: true) + end.presence, "language" => object.language, "author" => author, "contributor" => to_citeproc(object.contributors), - "issued" => get_date(object.dates, "Issued") ? get_date_parts(get_date(object.dates, "Issued")) : nil, - "submitted" => Array.wrap(object.dates).detect { |d| d["dateType"] == "Submitted" }.to_h.fetch("__content__", nil), - "abstract" => parse_attributes(object.descriptions, content: "description", first: true), + "issued" => + if get_date(object.dates, "Issued") + get_date_parts(get_date(object.dates, "Issued")) + end, + "submitted" => + Array.wrap(object.dates).detect do |d| + d["dateType"] == "Submitted" + end.to_h. + fetch("__content__", nil), + "abstract" => + parse_attributes( + object.descriptions, + content: "description", first: true, + ), "container-title" => object.container.to_h["title"], "DOI" => object.doi, "volume" => object.container.to_h["volume"], @@ -499,6 +796,7 @@ def citeproc_hsh "title" => parse_attributes(object.titles, content: "title", first: true), "URL" => object.url, "version" => object.version_info, - }.compact.symbolize_keys + }.compact. + symbolize_keys end end diff --git a/app/graphql/types/employment_type.rb b/app/graphql/types/employment_type.rb index a5d0a6427..058915d5e 100644 --- a/app/graphql/types/employment_type.rb +++ b/app/graphql/types/employment_type.rb @@ -3,9 +3,19 @@ class EmploymentType < BaseObject description "Information about employments" - field :organization_id, String, null: true, description: "The organization ID of the employment." 
- field :organization_name, String, null: false, description: "The organization name of the employment." - field :role_title, String, null: true, description: "The role title of the employment." - field :start_date, GraphQL::Types::ISO8601DateTime, null: true, description: "Employment start date." - field :end_date, GraphQL::Types::ISO8601DateTime, null: true, description: "Employment end date." + field :organization_id, + String, + null: true, description: "The organization ID of the employment." + field :organization_name, + String, + null: false, description: "The organization name of the employment." + field :role_title, + String, + null: true, description: "The role title of the employment." + field :start_date, + GraphQL::Types::ISO8601DateTime, + null: true, description: "Employment start date." + field :end_date, + GraphQL::Types::ISO8601DateTime, + null: true, description: "Employment end date." end diff --git a/app/graphql/types/event_connection_with_total_type.rb b/app/graphql/types/event_connection_with_total_type.rb index 1cfeefe93..1e64387ba 100644 --- a/app/graphql/types/event_connection_with_total_type.rb +++ b/app/graphql/types/event_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/event_data_edge.rb b/app/graphql/types/event_data_edge.rb index 60bfeebc4..ab17cfc18 100644 --- a/app/graphql/types/event_data_edge.rb +++ b/app/graphql/types/event_data_edge.rb @@ -9,11 +9,16 @@ class EventDataEdge < GraphQL::Relay::Edge }.freeze def event_data - @event_data ||= begin - return nil if node.blank? - - Event.query(nil, subj_id: doi_from_node(node), obj_id: parent.id).results.first.to_h.fetch("_source", nil) - end + @event_data ||= + begin + return nil if node.blank? 
+ + Event.query(nil, subj_id: doi_from_node(node), obj_id: parent.id). + results. + first. + to_h. + fetch("_source", nil) + end end def source_id @@ -29,7 +34,9 @@ def source end def relation_type - event_data.relation_type_id.underscore.camelcase(:lower) if event_data.present? + if event_data.present? + event_data.relation_type_id.underscore.camelcase(:lower) + end end def total diff --git a/app/graphql/types/event_data_edge_type.rb b/app/graphql/types/event_data_edge_type.rb index dc6453aa1..1d7589902 100644 --- a/app/graphql/types/event_data_edge_type.rb +++ b/app/graphql/types/event_data_edge_type.rb @@ -3,9 +3,15 @@ class EventDataEdgeType < GraphQL::Types::Relay::BaseEdge node_type(EventDataType) - field :source_id, String, null: true, description: "The source ID of the event." - field :target_id, String, null: true, description: "The target ID of the event." + field :source_id, + String, + null: true, description: "The source ID of the event." + field :target_id, + String, + null: true, description: "The target ID of the event." field :source, String, null: true, description: "Source for this event" - field :relation_type, String, null: true, description: "Relation type for this event." + field :relation_type, + String, + null: true, description: "Relation type for this event." field :total, Integer, null: true, description: "Total count for this event." 
end diff --git a/app/graphql/types/funder_connection_with_total_type.rb b/app/graphql/types/funder_connection_with_total_type.rb index d70c89024..f62797407 100644 --- a/app/graphql/types/funder_connection_with_total_type.rb +++ b/app/graphql/types/funder_connection_with_total_type.rb @@ -14,14 +14,17 @@ def total_count end def publication_connection_count - @publication_connection_count ||= Event.query(nil, citation_type: "Funder-ScholarlyArticle").results.total + @publication_connection_count ||= + Event.query(nil, citation_type: "Funder-ScholarlyArticle").results.total end def dataset_connection_count - @dataset_connection_count ||= Event.query(nil, citation_type: "Dataset-Funder").results.total + @dataset_connection_count ||= + Event.query(nil, citation_type: "Dataset-Funder").results.total end def software_connection_count - @software_connection_count ||= Event.query(nil, citation_type: "Funder-SoftwareSourceCode").results.total + @software_connection_count ||= + Event.query(nil, citation_type: "Funder-SoftwareSourceCode").results.total end end diff --git a/app/graphql/types/funder_type.rb b/app/graphql/types/funder_type.rb index ce09f52ca..5988abdf1 100644 --- a/app/graphql/types/funder_type.rb +++ b/app/graphql/types/funder_type.rb @@ -5,12 +5,26 @@ class FunderType < BaseObject description "Information about funders" - field :address, AddressType, null: true, description: "Physical address of the funder." - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." + field :address, + AddressType, + null: true, description: "Physical address of the funder." + field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." 
+ field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." - field :datasets, DatasetConnectionWithTotalType, null: true, description: "Funded datasets" do + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Funded datasets" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -37,7 +51,9 @@ class FunderType < BaseObject argument :after, String, required: false end - field :publications, PublicationConnectionWithTotalType, null: true, description: "Funded publications" do + field :publications, + PublicationConnectionWithTotalType, + null: true, description: "Funded publications" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -64,7 +80,9 @@ class FunderType < BaseObject argument :after, String, required: false end - field :softwares, SoftwareConnectionWithTotalType, null: true, description: "Funded software" do + field :softwares, + SoftwareConnectionWithTotalType, + null: true, description: "Funded software" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -91,7 +109,10 @@ class FunderType < BaseObject argument :after, String, required: false end - field :data_management_plans, DataManagementPlanConnectionWithTotalType, null: true, description: "Data management plans from this organization" do + field :data_management_plans, + DataManagementPlanConnectionWithTotalType, + null: true, + description: "Data management plans from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -118,7 +139,9 @@ class FunderType < BaseObject 
argument :after, String, required: false end - field :works, WorkConnectionWithTotalType, null: true, description: "Funded works" do + field :works, + WorkConnectionWithTotalType, + null: true, description: "Funded works" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -147,33 +170,50 @@ class FunderType < BaseObject end def address - { "type" => "postalAddress", - "country" => object.country.to_h.fetch("name", nil) } + { + "type" => "postalAddress", + "country" => object.country.to_h.fetch("name", nil), + } end def publications(**args) args[:resource_type_id] = "Text" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def datasets(**args) args[:resource_type_id] = "Dataset" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def softwares(**args) args[:resource_type_id] = "Software" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def data_management_plans(**args) args[:resource_type_id] = "Text" args[:resource_type] = "Data Management Plan" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def works(**args) - ElasticsearchModelResponseConnection.new(response(args), context: context, first: 
args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def view_count @@ -195,6 +235,37 @@ def citation_count end def response(**args) - Doi.gql_query(args[:query], ids: args[:ids], funder_id: object.id, user_id: args[:user_id], client_id: args[:repository_id], provider_id: args[:member_id], affiliation_id: args[:affiliation_id], organization_id: args[:organization_id], resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], agency: args[:registration_agency], language: args[:language], license: args[:license], has_person: args[:has_person], has_organization: args[:has_organization], has_affiliation: args[:has_affiliation], has_member: args[:has_member], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], published: args[:published], state: "findable", page: { cursor: args[:after].present? ? 
Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + ids: args[:ids], + funder_id: object.id, + user_id: args[:user_id], + client_id: args[:repository_id], + provider_id: args[:member_id], + affiliation_id: args[:affiliation_id], + organization_id: args[:organization_id], + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + agency: args[:registration_agency], + language: args[:language], + license: args[:license], + has_person: args[:has_person], + has_organization: args[:has_organization], + has_affiliation: args[:has_affiliation], + has_member: args[:has_member], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + published: args[:published], + state: "findable", + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end end diff --git a/app/graphql/types/funding_type.rb b/app/graphql/types/funding_type.rb index 10bcae048..539219e93 100644 --- a/app/graphql/types/funding_type.rb +++ b/app/graphql/types/funding_type.rb @@ -3,10 +3,26 @@ class FundingType < BaseObject description "Information about funding" - field :funder_name, String, null: true, hash_key: "funderName", description: "Funder name" - field :funder_identifier, String, null: true, hash_key: "funderIdentifier", description: "Funder identifier" - field :funder_identifier_type, String, null: true, hash_key: "funderIdentifierType", description: "Funder identifier type" - field :award_number, String, null: true, hash_key: "awardNumber", description: "Award number" - field :award_uri, String, null: true, hash_key: "awardUri", description: "Award URI" - field :award_title, String, null: true, hash_key: "awardTitle", description: "Award title" + field :funder_name, + String, + null: true, 
hash_key: "funderName", description: "Funder name" + field :funder_identifier, + String, + null: true, + hash_key: "funderIdentifier", + description: "Funder identifier" + field :funder_identifier_type, + String, + null: true, + hash_key: "funderIdentifierType", + description: "Funder identifier type" + field :award_number, + String, + null: true, hash_key: "awardNumber", description: "Award number" + field :award_uri, + String, + null: true, hash_key: "awardUri", description: "Award URI" + field :award_title, + String, + null: true, hash_key: "awardTitle", description: "Award title" end diff --git a/app/graphql/types/geolocation_box_type.rb b/app/graphql/types/geolocation_box_type.rb index 8deffc289..8561007ce 100644 --- a/app/graphql/types/geolocation_box_type.rb +++ b/app/graphql/types/geolocation_box_type.rb @@ -3,8 +3,24 @@ class GeolocationBoxType < BaseObject description "A box is defined by two geographic points. Left low corner and right upper corner." - field :west_bound_longitude, Float, null: false, hash_key: "westBoundLongitude", description: "Western longitudinal dimension of box." - field :east_bound_longitude, Float, null: false, hash_key: "eastBoundLongitude", description: "Eastern longitudinal dimension of box." - field :south_bound_latitude, Float, null: false, hash_key: "southBoundLatitude", description: "Southern latitudinal dimension of box." - field :north_bound_latitude, Float, null: false, hash_key: "northBoundLatitude", description: "Northern latitudinal dimension of box." + field :west_bound_longitude, + Float, + null: false, + hash_key: "westBoundLongitude", + description: "Western longitudinal dimension of box." + field :east_bound_longitude, + Float, + null: false, + hash_key: "eastBoundLongitude", + description: "Eastern longitudinal dimension of box." + field :south_bound_latitude, + Float, + null: false, + hash_key: "southBoundLatitude", + description: "Southern latitudinal dimension of box." 
+ field :north_bound_latitude, + Float, + null: false, + hash_key: "northBoundLatitude", + description: "Northern latitudinal dimension of box." end diff --git a/app/graphql/types/geolocation_point_type.rb b/app/graphql/types/geolocation_point_type.rb index ab0086265..70e4c9857 100644 --- a/app/graphql/types/geolocation_point_type.rb +++ b/app/graphql/types/geolocation_point_type.rb @@ -3,6 +3,14 @@ class GeolocationPointType < BaseObject description "A point contains a single longitude-latitude pair." - field :point_longitude, Float, null: true, hash_key: "pointLongitude", description: "Longitudinal dimension of point." - field :point_latitude, Float, null: true, hash_key: "pointLatitude", description: "Latitudinal dimension of point." + field :point_longitude, + Float, + null: true, + hash_key: "pointLongitude", + description: "Longitudinal dimension of point." + field :point_latitude, + Float, + null: true, + hash_key: "pointLatitude", + description: "Latitudinal dimension of point." end diff --git a/app/graphql/types/geolocation_type.rb b/app/graphql/types/geolocation_type.rb index c31892531..20bd587e6 100644 --- a/app/graphql/types/geolocation_type.rb +++ b/app/graphql/types/geolocation_type.rb @@ -3,7 +3,19 @@ class GeolocationType < BaseObject description "Spatial region or named place where the data was gathered or about which the data is focused." - field :geolocation_point, GeolocationPointType, null: true, hash_key: "geoLocationPoint", description: "A point location in space." - field :geolocation_box, GeolocationBoxType, null: true, hash_key: "geoLocationBox", description: "The spatial limits of a box." - field :geolocation_place, String, null: true, hash_key: "geoLocationPlace", description: "Description of a geographic location." + field :geolocation_point, + GeolocationPointType, + null: true, + hash_key: "geoLocationPoint", + description: "A point location in space." 
+ field :geolocation_box, + GeolocationBoxType, + null: true, + hash_key: "geoLocationBox", + description: "The spatial limits of a box." + field :geolocation_place, + String, + null: true, + hash_key: "geoLocationPlace", + description: "Description of a geographic location." end diff --git a/app/graphql/types/identifier_type.rb b/app/graphql/types/identifier_type.rb index c4e248dd4..638324eb7 100644 --- a/app/graphql/types/identifier_type.rb +++ b/app/graphql/types/identifier_type.rb @@ -3,7 +3,17 @@ class IdentifierType < BaseObject description "Information about identifiers" - field :identifier_type, String, null: true, hash_key: "identifierType", description: "The type of identifier." - field :identifier, String, null: true, description: "The value of the identifier." - field :identifier_url, String, null: true, hash_key: "identifierUrl", description: "The url of the identifier." + field :identifier_type, + String, + null: true, + hash_key: "identifierType", + description: "The type of identifier." + field :identifier, + String, + null: true, description: "The value of the identifier." + field :identifier_url, + String, + null: true, + hash_key: "identifierUrl", + description: "The url of the identifier." 
end diff --git a/app/graphql/types/image_connection_with_total_type.rb b/app/graphql/types/image_connection_with_total_type.rb index a46169e91..a28ca37ae 100644 --- a/app/graphql/types/image_connection_with_total_type.rb +++ b/app/graphql/types/image_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/instrument_connection_with_total_type.rb b/app/graphql/types/instrument_connection_with_total_type.rb index 5993cf827..f3c6fbb1e 100644 --- a/app/graphql/types/instrument_connection_with_total_type.rb +++ b/app/graphql/types/instrument_connection_with_total_type.rb @@ -20,7 +20,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/interactive_resource_connection_with_total_type.rb b/app/graphql/types/interactive_resource_connection_with_total_type.rb index e69416f6d..8c954cbda 100644 --- a/app/graphql/types/interactive_resource_connection_with_total_type.rb +++ b/app/graphql/types/interactive_resource_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/journal_article_connection_with_total_type.rb b/app/graphql/types/journal_article_connection_with_total_type.rb index 0308b4c29..dedd85822 100644 --- a/app/graphql/types/journal_article_connection_with_total_type.rb +++ b/app/graphql/types/journal_article_connection_with_total_type.rb @@ 
-22,7 +22,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/member_prefix_type.rb b/app/graphql/types/member_prefix_type.rb index 7caa7cf8c..74dd8fd06 100644 --- a/app/graphql/types/member_prefix_type.rb +++ b/app/graphql/types/member_prefix_type.rb @@ -3,9 +3,15 @@ class MemberPrefixType < BaseObject description "Information about member prefixes" - field :id, ID, null: false, hash_key: "uid", description: "Unique identifier for each provider prefix" + field :id, + ID, + null: false, + hash_key: "uid", + description: "Unique identifier for each provider prefix" field :type, String, null: false, description: "The type of the item." - field :name, String, null: false, hash_key: "prefix_id", description: "Provider prefix name" + field :name, + String, + null: false, hash_key: "prefix_id", description: "Provider prefix name" def type "MemberPrefix" diff --git a/app/graphql/types/member_type.rb b/app/graphql/types/member_type.rb index c1b77a374..970e98004 100644 --- a/app/graphql/types/member_type.rb +++ b/app/graphql/types/member_type.rb @@ -3,25 +3,56 @@ class MemberType < BaseObject description "Information about members" - field :id, ID, null: false, hash_key: "uid", description: "Unique identifier for the member" + field :id, + ID, + null: false, + hash_key: "uid", + description: "Unique identifier for the member" field :type, String, null: false, description: "The type of the item." 
field :name, String, null: false, description: "Member name" field :displayName, String, null: false, description: "Member display name" - field :ror_id, ID, null: true, description: "Research Organization Registry (ROR) identifier" - field :description, String, null: true, description: "Description of the member" + field :ror_id, + ID, + null: true, + description: "Research Organization Registry (ROR) identifier" + field :description, + String, + null: true, description: "Description of the member" field :website, Url, null: true, description: "Website of the member" field :logo_url, Url, null: true, description: "URL for the member logo" - field :region, String, null: true, description: "Geographic region where the member is located" - field :country, CountryType, null: true, description: "Country where the member is located" + field :region, + String, + null: true, description: "Geographic region where the member is located" + field :country, + CountryType, + null: true, description: "Country where the member is located" field :member_role, MemberRoleType, null: true, description: "Membership type" - field :organization_type, String, null: true, description: "Type of organization" - field :focus_area, String, null: true, description: "Field of science covered by member" - field :joined, GraphQL::Types::ISO8601Date, null: true, description: "Date member joined DataCite" - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." - - field :datasets, DatasetConnectionWithTotalType, null: true, description: "Datasets by this provider." 
do + field :organization_type, + String, + null: true, description: "Type of organization" + field :focus_area, + String, + null: true, description: "Field of science covered by member" + field :joined, + GraphQL::Types::ISO8601Date, + null: true, description: "Date member joined DataCite" + field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." + field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." + + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Datasets by this provider." do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -47,7 +78,9 @@ class MemberType < BaseObject argument :after, String, required: false end - field :publications, PublicationConnectionWithTotalType, null: true, description: "Publications by this provider." do + field :publications, + PublicationConnectionWithTotalType, + null: true, description: "Publications by this provider." do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -73,7 +106,9 @@ class MemberType < BaseObject argument :after, String, required: false end - field :softwares, SoftwareConnectionWithTotalType, null: true, description: "Software by this provider." do + field :softwares, + SoftwareConnectionWithTotalType, + null: true, description: "Software by this provider." 
do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -99,7 +134,10 @@ class MemberType < BaseObject argument :after, String, required: false end - field :data_management_plans, DataManagementPlanConnectionWithTotalType, null: true, description: "Data management plans from this organization" do + field :data_management_plans, + DataManagementPlanConnectionWithTotalType, + null: true, + description: "Data management plans from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -122,7 +160,9 @@ class MemberType < BaseObject argument :after, String, required: false end - field :works, WorkConnectionWithTotalType, null: true, description: "Works by this provider." do + field :works, + WorkConnectionWithTotalType, + null: true, description: "Works by this provider." do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -149,7 +189,9 @@ class MemberType < BaseObject argument :after, String, required: false end - field :prefixes, MemberPrefixConnectionWithTotalType, null: true, description: "Prefixes managed by the member" do + field :prefixes, + MemberPrefixConnectionWithTotalType, + null: true, description: "Prefixes managed by the member" do argument :query, String, required: false argument :state, String, required: false argument :year, String, required: false @@ -157,7 +199,9 @@ class MemberType < BaseObject argument :after, String, required: false end - field :repositories, RepositoryConnectionWithTotalType, null: true, description: "Repositories associated with the member" do + field :repositories, + RepositoryConnectionWithTotalType, + null: true, description: "Repositories associated with the member" do argument :query, String, required: false argument :year, String, required: false argument :software, 
String, required: false @@ -170,73 +214,157 @@ def type end def member_role - { "id" => object.member_type, - "name" => object.member_type.titleize } + { "id" => object.member_type, "name" => object.member_type.titleize } end def country return {} if object.country_code.blank? { - id: object.country_code, - name: ISO3166::Country[object.country_code].name, + id: object.country_code, name: ISO3166::Country[object.country_code].name }.compact end def publications(**args) args[:resource_type_id] = "Text" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def datasets(**args) args[:resource_type_id] = "Dataset" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def softwares(**args) args[:resource_type_id] = "Software" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def data_management_plans(**args) args[:resource_type_id] = "Text" args[:resource_type] = "Data Management Plan" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def works(**args) - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: 
args[:after], + ) end def prefixes(**args) - response = ProviderPrefix.query(args[:query], provider_id: object.uid, state: args[:state], year: args[:year], page: { cursor: args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) - ElasticsearchModelResponseConnection.new(response, context: context, first: args[:first], after: args[:after]) + response = + ProviderPrefix.query( + args[:query], + provider_id: object.uid, + state: args[:state], + year: args[:year], + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) + ElasticsearchModelResponseConnection.new( + response, + context: context, first: args[:first], after: args[:after], + ) end def repositories(**args) - response = Client.query(args[:query], provider_id: object.uid, year: args[:year], software: args[:software], page: { cursor: args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) - ElasticsearchModelResponseConnection.new(response, context: context, first: args[:first], after: args[:after]) + response = + Client.query( + args[:query], + provider_id: object.uid, + year: args[:year], + software: args[:software], + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) + ElasticsearchModelResponseConnection.new( + response, + context: context, first: args[:first], after: args[:after], + ) end def view_count args = { first: 0 } r = response(args) - r.results.total.positive? ? aggregate_count(r.response.aggregations.views.buckets) : 0 + if r.results.total.positive? + aggregate_count(r.response.aggregations.views.buckets) + else + 0 + end end def download_count args = { first: 0 } r = response(args) - r.results.total.positive? ? aggregate_count(r.response.aggregations.downloads.buckets) : 0 + if r.results.total.positive? 
+ aggregate_count(r.response.aggregations.downloads.buckets) + else + 0 + end end def citation_count args = { first: 0 } r = response(args) - r.results.total.positive? ? aggregate_count(r.response.aggregations.citations.buckets) : 0 + if r.results.total.positive? + aggregate_count(r.response.aggregations.citations.buckets) + else + 0 + end end def response(**args) - Doi.gql_query(args[:query], ids: args[:ids], user_id: args[:user_id], client_id: args[:repository_id], provider_id: object.member_type == "consortium" ? nil : object.uid, consortium_id: object.member_type == "consortium" ? object.uid : nil, funder_id: args[:funder_id], affiliation_id: args[:affiliation_id], organization_id: args[:organization_id], resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], has_person: args[:has_person], has_funder: args[:has_funder], has_affiliation: args[:has_affiliation], has_organization: args[:has_organization], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], published: args[:published], language: args[:language], license: args[:license], state: "findable", page: { cursor: args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + ids: args[:ids], + user_id: args[:user_id], + client_id: args[:repository_id], + provider_id: object.member_type == "consortium" ? nil : object.uid, + consortium_id: object.member_type == "consortium" ? 
object.uid : nil, + funder_id: args[:funder_id], + affiliation_id: args[:affiliation_id], + organization_id: args[:organization_id], + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + has_person: args[:has_person], + has_funder: args[:has_funder], + has_affiliation: args[:has_affiliation], + has_organization: args[:has_organization], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + published: args[:published], + language: args[:language], + license: args[:license], + state: "findable", + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end end diff --git a/app/graphql/types/model_connection_with_total_type.rb b/app/graphql/types/model_connection_with_total_type.rb index c12c6f0e3..85e11c5e7 100644 --- a/app/graphql/types/model_connection_with_total_type.rb +++ b/app/graphql/types/model_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/organization_connection_with_total_type.rb b/app/graphql/types/organization_connection_with_total_type.rb index 73a277fbc..b67947ec4 100644 --- a/app/graphql/types/organization_connection_with_total_type.rb +++ b/app/graphql/types/organization_connection_with_total_type.rb @@ -8,9 +8,9 @@ class OrganizationConnectionWithTotalType < BaseConnection # using latest release in any given year, starting with end of 2017, # right before ROR was launched in January 2018 YEARS = [ - { "id" => "2017", "title" => "2017", "count" => 80248 }, - { "id" => "2018", "title" => "2018", "count" => 11392 }, - { "id" => 
"2019", "title" => "2019", "count" => 6179 }, + { "id" => "2017", "title" => "2017", "count" => 80_248 }, + { "id" => "2018", "title" => "2018", "count" => 11_392 }, + { "id" => "2019", "title" => "2019", "count" => 6_179 }, ].freeze field :total_count, Integer, null: false, cache: true @@ -20,11 +20,19 @@ class OrganizationConnectionWithTotalType < BaseConnection field :person_connection_count, Integer, null: false, cache: true def years - count = YEARS.reduce(0) do |sum, i| - sum += i["count"] - sum - end - this_year = object.total_count > count ? { "id" => "2020", "title" => "2020", "count" => object.total_count - count } : nil + count = + YEARS.reduce(0) do |sum, i| + sum += i["count"] + sum + end + this_year = + if object.total_count > count + { + "id" => "2020", + "title" => "2020", + "count" => object.total_count - count, + } + end this_year ? YEARS << this_year : YEARS end @@ -41,6 +49,7 @@ def countries end def person_connection_count - @person_connection_count ||= Event.query(nil, citation_type: "Organization-Person").results.total + @person_connection_count ||= + Event.query(nil, citation_type: "Organization-Person").results.total end end diff --git a/app/graphql/types/organization_type.rb b/app/graphql/types/organization_type.rb index dd2fc3631..687127a6e 100644 --- a/app/graphql/types/organization_type.rb +++ b/app/graphql/types/organization_type.rb @@ -13,22 +13,57 @@ class OrganizationType < BaseObject description "Information about organizations" - field :identifiers, [IdentifierType], null: true, description: "The identifier(s) for the organization." - field :member_id, ID, null: true, description: "Unique member identifier if a DataCite member" - field :member_role_id, String, null: true, description: "Membership role id if a DataCite member" - field :member_role_name, String, null: true, description: "Membership role name if a DataCite member" - field :url, [Url], null: true, hash_key: "links", description: "URL of the organization." 
- field :wikipedia_url, Url, null: true, hash_key: "wikipedia_url", description: "Wikipedia URL of the organization." - field :twitter, String, null: true, description: "Twitter username of the organization." + field :identifiers, + [IdentifierType], + null: true, description: "The identifier(s) for the organization." + field :member_id, + ID, + null: true, description: "Unique member identifier if a DataCite member" + field :member_role_id, + String, + null: true, description: "Membership role id if a DataCite member" + field :member_role_name, + String, + null: true, description: "Membership role name if a DataCite member" + field :url, + [Url], + null: true, hash_key: "links", description: "URL of the organization." + field :wikipedia_url, + Url, + null: true, + hash_key: "wikipedia_url", + description: "Wikipedia URL of the organization." + field :twitter, + String, + null: true, description: "Twitter username of the organization." field :types, [String], null: true, description: "The type of organization." - field :country, CountryType, null: true, description: "Country of the organization." - field :inception_year, Int, null: true, description: "Year when the organization came into existence." - field :geolocation, GeolocationPointType, null: true, description: "Geolocation of the organization." - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." + field :country, + CountryType, + null: true, description: "Country of the organization." + field :inception_year, + Int, + null: true, + description: "Year when the organization came into existence." + field :geolocation, + GeolocationPointType, + null: true, description: "Geolocation of the organization." 
+ field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." + field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." - field :datasets, DatasetConnectionWithTotalType, null: true, description: "Datasets from this organization" do + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Datasets from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -55,7 +90,9 @@ class OrganizationType < BaseObject argument :after, String, required: false end - field :publications, PublicationConnectionWithTotalType, null: true, description: "Publications from this organization" do + field :publications, + PublicationConnectionWithTotalType, + null: true, description: "Publications from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -82,7 +119,9 @@ class OrganizationType < BaseObject argument :after, String, required: false end - field :softwares, SoftwareConnectionWithTotalType, null: true, description: "Software from this organization" do + field :softwares, + SoftwareConnectionWithTotalType, + null: true, description: "Software from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -109,7 +148,10 @@ class OrganizationType < BaseObject argument :after, String, required: false end - field :data_management_plans, DataManagementPlanConnectionWithTotalType, null: true, description: "Data management plans from this organization" do + field :data_management_plans, + DataManagementPlanConnectionWithTotalType, + null: true, + 
description: "Data management plans from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -136,7 +178,9 @@ class OrganizationType < BaseObject argument :after, String, required: false end - field :works, WorkConnectionWithTotalType, null: true, description: "Works from this organization" do + field :works, + WorkConnectionWithTotalType, + null: true, description: "Works from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -163,7 +207,9 @@ class OrganizationType < BaseObject argument :after, String, required: false end - field :people, PersonConnectionWithTotalType, null: true, description: "People from this organization" do + field :people, + PersonConnectionWithTotalType, + null: true, description: "People from this organization" do argument :query, String, required: false argument :first, Int, required: false, default_value: 25 argument :after, String, required: false @@ -174,12 +220,20 @@ def alternate_name end def member - m = Provider.unscoped.where("allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_MEMBER')").where(deleted_at: nil).where(ror_id: object.id).first + m = + Provider.unscoped.where( + "allocator.role_name IN ('ROLE_FOR_PROFIT_PROVIDER', 'ROLE_CONSORTIUM' , 'ROLE_CONSORTIUM_ORGANIZATION', 'ROLE_ALLOCATOR', 'ROLE_MEMBER')", + ). + where(deleted_at: nil). + where(ror_id: object.id). + first return {} if m.blank? 
- { "member_id" => m.symbol.downcase, + { + "member_id" => m.symbol.downcase, "member_role_id" => MEMBER_ROLES[m.role_name], - "member_role_name" => MEMBER_ROLES[m.role_name].titleize } + "member_role_name" => MEMBER_ROLES[m.role_name].titleize, + } end def member_id @@ -195,51 +249,84 @@ def member_role_name end def geolocation - { "pointLongitude" => object.dig("geolocation", "longitude"), - "pointLatitude" => object.dig("geolocation", "latitude") } + { + "pointLongitude" => object.dig("geolocation", "longitude"), + "pointLatitude" => object.dig("geolocation", "latitude"), + } end def identifiers - object.fundref.map { |o| { "identifierType" => "fundref", "identifier" => o } } + - Array.wrap(object.wikidata).map { |o| { "identifierType" => "wikidata", "identifier" => o } } + - Array.wrap(object.grid).map { |o| { "identifierType" => "grid", "identifier" => o } } + + object.fundref.map do |o| + { "identifierType" => "fundref", "identifier" => o } + end + + Array.wrap(object.wikidata).map do |o| + { "identifierType" => "wikidata", "identifier" => o } + end + + Array.wrap(object.grid).map do |o| + { "identifierType" => "grid", "identifier" => o } + end + object.isni.map { |o| { "identifierType" => "isni", "identifier" => o } } end def publications(**args) args[:resource_type_id] = "Text" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def datasets(**args) args[:resource_type_id] = "Dataset" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def softwares(**args) args[:resource_type_id] = "Software" - ElasticsearchModelResponseConnection.new(response(args), context: 
context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def data_management_plans(**args) args[:resource_type_id] = "Text" args[:resource_type] = "Data Management Plan" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def works(**args) - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def people(**args) grid_query = "grid-org-id:#{object.grid}" - ringgold_query = object.ringgold.present? ? "ringgold-org-id:#{object.ringgold}" : "" + ringgold_query = + object.ringgold.present? ? "ringgold-org-id:#{object.ringgold}" : "" org_query = [grid_query, ringgold_query].compact.join(" OR ") query_query = args[:query].present? ? "(#{args[:query]})" : nil query = ["(#{org_query})", query_query].compact.join(" AND ") - response = Person.query(query, limit: args[:first], offset: args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : nil) - HashConnection.new(response, context: context, first: args[:first], after: args[:after]) + response = + Person.query( + query, + limit: args[:first], + offset: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : nil, + ) + HashConnection.new( + response, + context: context, first: args[:first], after: args[:after], + ) end def view_count @@ -261,6 +348,40 @@ def citation_count end def response(**args) - Doi.gql_query(args[:query], ids: args[:ids], affiliation_id: object.id, organization_id: object.id, member_id: %w(direct_member consortium_organization).include?(member["member_role_id"]) ? 
object.id : nil, user_id: args[:user_id], client_id: args[:repository_id], funder_id: args[:funder_id] || object.fundref.join(","), resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], agency: args[:registration_agency], language: args[:language], license: args[:license], has_person: args[:has_person], has_funder: args[:has_funder], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], published: args[:published], state: "findable", page: { cursor: args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + ids: args[:ids], + affiliation_id: object.id, + organization_id: object.id, + member_id: + if %w[direct_member consortium_organization].include?( + member["member_role_id"], + ) + object.id + end, + user_id: args[:user_id], + client_id: args[:repository_id], + funder_id: args[:funder_id] || object.fundref.join(","), + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + agency: args[:registration_agency], + language: args[:language], + license: args[:license], + has_person: args[:has_person], + has_funder: args[:has_funder], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + published: args[:published], + state: "findable", + page: { + cursor: + args[:after].present? ? 
Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end end diff --git a/app/graphql/types/other_connection_with_total_type.rb b/app/graphql/types/other_connection_with_total_type.rb index 1cbe18ee7..327aeff53 100644 --- a/app/graphql/types/other_connection_with_total_type.rb +++ b/app/graphql/types/other_connection_with_total_type.rb @@ -21,7 +21,9 @@ def years end def registration_agencies - facet_by_registration_agency(object_id.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object_id.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/person_connection_with_total_type.rb b/app/graphql/types/person_connection_with_total_type.rb index 1dab23e92..53ba2590c 100644 --- a/app/graphql/types/person_connection_with_total_type.rb +++ b/app/graphql/types/person_connection_with_total_type.rb @@ -6,14 +6,14 @@ class PersonConnectionWithTotalType < BaseConnection # data from Tom Demeranville (ORCID) on Sep 15, 2020 YEARS = [ - { "id" => "2012", "title" => "2012", "count" => 44270 }, - { "id" => "2013", "title" => "2013", "count" => 426775 }, - { "id" => "2014", "title" => "2014", "count" => 612300 }, - { "id" => "2015", "title" => "2015", "count" => 788650 }, - { "id" => "2016", "title" => "2016", "count" => 1068295 }, - { "id" => "2017", "title" => "2017", "count" => 1388796 }, - { "id" => "2018", "title" => "2018", "count" => 1585851 }, - { "id" => "2019", "title" => "2019", "count" => 2006672 }, + { "id" => "2012", "title" => "2012", "count" => 44_270 }, + { "id" => "2013", "title" => "2013", "count" => 426_775 }, + { "id" => "2014", "title" => "2014", "count" => 612_300 }, + { "id" => "2015", "title" => "2015", "count" => 788_650 }, + { "id" => "2016", "title" => "2016", "count" => 1_068_295 }, + { "id" => "2017", "title" => "2017", "count" => 1_388_796 }, + { "id" => "2018", "title" => "2018", "count" => 1_585_851 }, + { "id" => "2019", "title" => "2019", "count" => 
2_006_672 }, ].freeze field :total_count, Integer, null: false, cache: true @@ -28,27 +28,55 @@ def total_count end def years - count = YEARS.reduce(0) do |sum, i| - sum += i["count"] - sum - end - this_year = object.total_count > count ? { "id" => "2020", "title" => "2020", "count" => object.total_count - count } : nil + count = + YEARS.reduce(0) do |sum, i| + sum += i["count"] + sum + end + this_year = + if object.total_count > count + { + "id" => "2020", + "title" => "2020", + "count" => object.total_count - count, + } + end this_year ? YEARS << this_year : YEARS end def publication_connection_count - Event.query(nil, citation_type: "Person-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Person-ScholarlyArticle", page: { number: 1, size: 0 }, + ). + results. + total end def dataset_connection_count - Event.query(nil, citation_type: "Dataset-Person", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Dataset-Person", page: { number: 1, size: 0 }, + ). + results. + total end def software_connection_count - Event.query(nil, citation_type: "Person-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Person-SoftwareSourceCode", page: { number: 1, size: 0 }, + ). + results. + total end def organization_connection_count - Event.query(nil, citation_type: "Organization-Person", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Organization-Person", page: { number: 1, size: 0 }, + ). + results. + total end end diff --git a/app/graphql/types/person_type.rb b/app/graphql/types/person_type.rb index b19eb9d76..a61820a42 100644 --- a/app/graphql/types/person_type.rb +++ b/app/graphql/types/person_type.rb @@ -5,18 +5,45 @@ class PersonType < BaseObject description "A person." - field :given_name, String, null: true, description: "Given name. In the U.S., the first name of a person." 
- field :family_name, String, null: true, description: "Family name. In the U.S., the last name of an person." + field :given_name, + String, + null: true, + description: "Given name. In the U.S., the first name of a person." + field :family_name, + String, + null: true, + description: "Family name. In the U.S., the last name of an person." field :description, String, null: true, description: "Biography of a Person." - field :links, [LinkType], null: true, description: "The links to other relevant web pages about the person." - field :identifiers, [IdentifierType], null: true, description: "The identifier(s) for the person." - field :country, CountryType, null: true, description: "Country where the person is located." - field :employment, [EmploymentType], null: true, description: "The employments of the person" - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." + field :links, + [LinkType], + null: true, + description: "The links to other relevant web pages about the person." + field :identifiers, + [IdentifierType], + null: true, description: "The identifier(s) for the person." + field :country, + CountryType, + null: true, description: "Country where the person is located." + field :employment, + [EmploymentType], + null: true, description: "The employments of the person" + field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." + field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." 
- field :datasets, DatasetConnectionWithTotalType, null: true, description: "Authored datasets" do + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Authored datasets" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -41,7 +68,9 @@ class PersonType < BaseObject argument :after, String, required: false end - field :publications, PublicationConnectionWithTotalType, null: true, description: "Authored publications" do + field :publications, + PublicationConnectionWithTotalType, + null: true, description: "Authored publications" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -66,7 +95,9 @@ class PersonType < BaseObject argument :after, String, required: false end - field :softwares, SoftwareConnectionWithTotalType, null: true, description: "Authored software" do + field :softwares, + SoftwareConnectionWithTotalType, + null: true, description: "Authored software" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -90,7 +121,9 @@ class PersonType < BaseObject argument :after, String, required: false end - field :works, WorkConnectionWithTotalType, null: true, description: "Authored works" do + field :works, + WorkConnectionWithTotalType, + null: true, description: "Authored works" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -118,21 +151,33 @@ class PersonType < BaseObject def publications(**args) args[:resource_type_id] = "Text" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def datasets(**args) 
args[:resource_type_id] = "Dataset" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def softwares(**args) args[:resource_type_id] = "Software" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def works(**args) - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def view_count @@ -154,6 +199,35 @@ def citation_count end def response(**args) - Doi.gql_query(args[:query], ids: args[:ids], user_id: object[:id], client_id: args[:repository_id], provider_id: args[:member_id], affiliation_id: args[:affiliation_id], organization_id: args[:organization_id], resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], agency: args[:registration_agency], has_funder: args[:has_funder], has_affiliation: args[:has_affiliation], has_organization: args[:has_organization], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], published: args[:published], license: args[:license], language: args[:language], state: "findable", page: { cursor: args[:after].present? ? 
Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + ids: args[:ids], + user_id: object[:id], + client_id: args[:repository_id], + provider_id: args[:member_id], + affiliation_id: args[:affiliation_id], + organization_id: args[:organization_id], + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + agency: args[:registration_agency], + has_funder: args[:has_funder], + has_affiliation: args[:has_affiliation], + has_organization: args[:has_organization], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + published: args[:published], + license: args[:license], + language: args[:language], + state: "findable", + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end end diff --git a/app/graphql/types/physical_object_connection_with_total_type.rb b/app/graphql/types/physical_object_connection_with_total_type.rb index b963cec74..1b6f54de4 100644 --- a/app/graphql/types/physical_object_connection_with_total_type.rb +++ b/app/graphql/types/physical_object_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/prefix_type.rb b/app/graphql/types/prefix_type.rb index 3afdb2950..a54ba529c 100644 --- a/app/graphql/types/prefix_type.rb +++ b/app/graphql/types/prefix_type.rb @@ -3,7 +3,11 @@ class PrefixType < BaseObject description "Information about prefixes" - field :id, ID, null: false, hash_key: "uid", description: "Unique identifier for each prefix" + field :id, + ID, + null: false, + hash_key: "uid", + 
description: "Unique identifier for each prefix" field :type, String, null: false, description: "The type of the item." def type diff --git a/app/graphql/types/preprint_connection_with_total_type.rb b/app/graphql/types/preprint_connection_with_total_type.rb index d13be3aaa..21af62889 100644 --- a/app/graphql/types/preprint_connection_with_total_type.rb +++ b/app/graphql/types/preprint_connection_with_total_type.rb @@ -22,7 +22,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/publication_connection_with_total_type.rb b/app/graphql/types/publication_connection_with_total_type.rb index 782903b61..abe2554ea 100644 --- a/app/graphql/types/publication_connection_with_total_type.rb +++ b/app/graphql/types/publication_connection_with_total_type.rb @@ -53,26 +53,65 @@ def fields_of_science end def publication_connection_count - @publication_connection_count ||= Event.query(nil, citation_type: "ScholarlyArticle-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @publication_connection_count ||= + Event.query( + nil, + citation_type: "ScholarlyArticle-ScholarlyArticle", + page: { number: 1, size: 0 }, + ). + results. + total end def dataset_connection_count - @dataset_connection_count ||= Event.query(nil, citation_type: "Dataset-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @dataset_connection_count ||= + Event.query( + nil, + citation_type: "Dataset-ScholarlyArticle", page: { number: 1, size: 0 }, + ). + results. 
+ total end def software_connection_count - @software_connection_count ||= Event.query(nil, citation_type: "ScholarlyArticle-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + @software_connection_count ||= + Event.query( + nil, + citation_type: "ScholarlyArticle-SoftwareSourceCode", + page: { number: 1, size: 0 }, + ). + results. + total end def person_connection_count - @person_connection_count ||= Event.query(nil, citation_type: "Person-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @person_connection_count ||= + Event.query( + nil, + citation_type: "Person-ScholarlyArticle", page: { number: 1, size: 0 }, + ). + results. + total end def funder_connection_count - @funder_connection_count ||= Event.query(nil, citation_type: "Funder-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @funder_connection_count ||= + Event.query( + nil, + citation_type: "Funder-ScholarlyArticle", page: { number: 1, size: 0 }, + ). + results. + total end def organization_connection_count - @organization_connection_count ||= Event.query(nil, citation_type: "Organization-ScholarlyArticle", page: { number: 1, size: 0 }).results.total + @organization_connection_count ||= + Event.query( + nil, + citation_type: "Organization-ScholarlyArticle", + page: { number: 1, size: 0 }, + ). + results. 
+ total end end diff --git a/app/graphql/types/query_type.rb b/app/graphql/types/query_type.rb index 217c9c260..c5e68345f 100644 --- a/app/graphql/types/query_type.rb +++ b/app/graphql/types/query_type.rb @@ -193,12 +193,12 @@ def actors(**args) def actor(id:) result = if orcid_from_url(id) - Person.find_by(id: id).fetch(:data, []).first - elsif ror_id_from_url(id) - Organization.find_by(id: id).fetch(:data, []).first - elsif doi_from_url(id).to_s.starts_with?("10.13039") - Funder.find_by(id: id).fetch(:data, []).first - end + Person.find_by(id: id).fetch(:data, []).first + elsif ror_id_from_url(id) + Organization.find_by(id: id).fetch(:data, []).first + elsif doi_from_url(id).to_s.starts_with?("10.13039") + Funder.find_by(id: id).fetch(:data, []).first + end fail ActiveRecord::RecordNotFound if result.nil? diff --git a/app/graphql/types/related_identifier_type.rb b/app/graphql/types/related_identifier_type.rb index 9c3e651e9..0e402660a 100644 --- a/app/graphql/types/related_identifier_type.rb +++ b/app/graphql/types/related_identifier_type.rb @@ -3,11 +3,33 @@ class RelatedIdentifierType < BaseObject description "Information about related identifiers" - field :related_identifier, String, null: false, hash_key: "relatedIdentifier", description: "Related identifier" - field :related_identifier_type, String, null: false, hash_key: "relatedIdentifierType", description: "Related identifier type" - field :relation_type, String, null: false, hash_key: "relationType", description: "Relation type" - field :related_metadata_scheme, String, null: true, hash_key: "relatedMetadataScheme", description: "Related metadata scheme" - field :scheme_uri, String, null: true, hash_key: "schemeUri", description: "Scheme URI" - field :scheme_type, String, null: true, hash_key: "schemeType", description: "Scheme type" - field :resource_type_general, String, null: true, hash_key: "resourceTypeGeneral", description: "Resource type general" + field :related_identifier, + String, + null: 
false, + hash_key: "relatedIdentifier", + description: "Related identifier" + field :related_identifier_type, + String, + null: false, + hash_key: "relatedIdentifierType", + description: "Related identifier type" + field :relation_type, + String, + null: false, hash_key: "relationType", description: "Relation type" + field :related_metadata_scheme, + String, + null: true, + hash_key: "relatedMetadataScheme", + description: "Related metadata scheme" + field :scheme_uri, + String, + null: true, hash_key: "schemeUri", description: "Scheme URI" + field :scheme_type, + String, + null: true, hash_key: "schemeType", description: "Scheme type" + field :resource_type_general, + String, + null: true, + hash_key: "resourceTypeGeneral", + description: "Resource type general" end diff --git a/app/graphql/types/repository_prefix_type.rb b/app/graphql/types/repository_prefix_type.rb index 0eec6f5c3..b6f16a52c 100644 --- a/app/graphql/types/repository_prefix_type.rb +++ b/app/graphql/types/repository_prefix_type.rb @@ -3,9 +3,17 @@ class RepositoryPrefixType < BaseObject description "Information about repository prefixes" - field :id, ID, null: false, hash_key: "uid", description: "Unique identifier for each repository prefix" + field :id, + ID, + null: false, + hash_key: "uid", + description: "Unique identifier for each repository prefix" field :type, String, null: false, description: "The type of the item." 
- field :name, String, null: false, hash_key: "prefix_id", description: "Repository prefix name" + field :name, + String, + null: false, + hash_key: "prefix_id", + description: "Repository prefix name" def type "RepositoryPrefix" diff --git a/app/graphql/types/repository_type.rb b/app/graphql/types/repository_type.rb index 12d1343d7..b46aeefe1 100644 --- a/app/graphql/types/repository_type.rb +++ b/app/graphql/types/repository_type.rb @@ -3,25 +3,59 @@ class RepositoryType < BaseObject description "Information about repositories" - field :id, ID, null: false, hash_key: "uid", description: "Unique identifier for each repository" + field :id, + ID, + null: false, + hash_key: "uid", + description: "Unique identifier for each repository" field :type, String, null: false, description: "The type of the item." - field :re3data_id, ID, null: true, description: "The re3data identifier for the repository" + field :re3data_id, + ID, + null: true, description: "The re3data identifier for the repository" field :name, String, null: false, description: "Repository name" - field :alternate_name, String, null: true, description: "Repository alternate name" - field :description, String, null: true, description: "Description of the repository" + field :alternate_name, + String, + null: true, description: "Repository alternate name" + field :description, + String, + null: true, description: "Description of the repository" field :url, Url, null: true, description: "The homepage of the repository" - field :software, String, null: true, description: "The name of the software that is used to run the repository" - field :client_type, String, null: true, description: "The client type (repository or periodical)" - field :repository_type, [String], null: true, description: "The repository type(s)" - field :certificate, [String], null: true, description: "The certificate(s) for the repository" - field :language, [String], null: true, description: "The langauge of the repository" + field 
 :software, + String, + null: true, + description: + "The name of the software that is used to run the repository" + field :client_type, + String, + null: true, description: "The client type (repository or periodical)" + field :repository_type, + [String], + null: true, description: "The repository type(s)" + field :certificate, + [String], + null: true, description: "The certificate(s) for the repository" + field :language, + [String], + null: true, description: "The language of the repository" field :issn, IssnType, null: true, description: "The ISSN" - field :view_count, Integer, null: true, description: "The number of views according to the Counter Code of Practice." - field :download_count, Integer, null: true, description: "The number of downloads according to the Counter Code of Practice." - field :citation_count, Integer, null: true, description: "The number of citations." + field :view_count, + Integer, + null: true, + description: + "The number of views according to the Counter Code of Practice." + field :download_count, + Integer, + null: true, + description: + "The number of downloads according to the Counter Code of Practice." + field :citation_count, + Integer, + null: true, description: "The number of citations." 
- field :datasets, DatasetConnectionWithTotalType, null: true, description: "Datasets managed by the repository" do + field :datasets, + DatasetConnectionWithTotalType, + null: true, description: "Datasets managed by the repository" do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -49,7 +83,9 @@ class RepositoryType < BaseObject argument :after, String, required: false end - field :publications, PublicationConnectionWithTotalType, null: true, description: "Publications managed by the repository" do + field :publications, + PublicationConnectionWithTotalType, + null: true, description: "Publications managed by the repository" do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -77,7 +113,9 @@ class RepositoryType < BaseObject argument :after, String, required: false end - field :softwares, SoftwareConnectionWithTotalType, null: true, description: "Software managed by the repository" do + field :softwares, + SoftwareConnectionWithTotalType, + null: true, description: "Software managed by the repository" do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -105,7 +143,10 @@ class RepositoryType < BaseObject argument :after, String, required: false end - field :data_management_plans, DataManagementPlanConnectionWithTotalType, null: true, description: "Data management plans from this organization" do + field :data_management_plans, + DataManagementPlanConnectionWithTotalType, + null: true, + description: "Data management plans from this organization" do argument :query, String, required: false argument :ids, [String], required: false argument :published, String, required: false @@ -132,7 +173,9 @@ class RepositoryType < BaseObject argument :after, String, required: false end - field :works, WorkConnectionWithTotalType, null: true, 
description: "Works managed by the repository" do + field :works, + WorkConnectionWithTotalType, + null: true, description: "Works managed by the repository" do argument :query, String, required: false argument :ids, String, required: false argument :published, String, required: false @@ -161,7 +204,9 @@ class RepositoryType < BaseObject argument :after, String, required: false end - field :prefixes, RepositoryPrefixConnectionWithTotalType, null: true, description: "Prefixes managed by the repository" do + field :prefixes, + RepositoryPrefixConnectionWithTotalType, + null: true, description: "Prefixes managed by the repository" do argument :query, String, required: false argument :state, String, required: false argument :year, String, required: false @@ -175,32 +220,57 @@ def type def datasets(**args) args[:resource_type_id] = "Dataset" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def publications(**args) args[:resource_type_id] = "Text" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def softwares(**args) args[:resource_type_id] = "Software" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def data_management_plans(**args) args[:resource_type_id] = "Text" args[:resource_type] = "Data Management Plan" - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + 
response(args), + context: context, first: args[:first], after: args[:after], + ) end def works(**args) - ElasticsearchModelResponseConnection.new(response(args), context: context, first: args[:first], after: args[:after]) + ElasticsearchModelResponseConnection.new( + response(args), + context: context, first: args[:first], after: args[:after], + ) end def prefixes(**args) - response = ClientPrefix.query(args[:query], client_id: object.uid, state: args[:state], year: args[:year], page: { number: 1, size: args[:first] }) - ElasticsearchModelResponseConnection.new(response, context: context, first: args[:first], after: args[:after]) + response = + ClientPrefix.query( + args[:query], + client_id: object.uid, + state: args[:state], + year: args[:year], + page: { number: 1, size: args[:first] }, + ) + ElasticsearchModelResponseConnection.new( + response, + context: context, first: args[:first], after: args[:after], + ) end def view_count @@ -222,6 +292,37 @@ def citation_count end def response(**args) - Doi.gql_query(args[:query], funder_id: args[:funder_id], user_id: args[:user_id], client_id: object.uid, provider_id: args[:member_id], affiliation_id: args[:affiliation_id], organization_id: args[:organization_id], resource_type_id: args[:resource_type_id], resource_type: args[:resource_type], agency: args[:registration_agency], language: args[:language], license: args[:license], has_person: args[:has_person], has_organization: args[:has_organization], has_affiliation: args[:has_affiliation], has_member: args[:has_member], has_funder: args[:has_funder], has_citations: args[:has_citations], has_parts: args[:has_parts], has_versions: args[:has_versions], has_views: args[:has_views], has_downloads: args[:has_downloads], field_of_science: args[:field_of_science], published: args[:published], state: "findable", page: { cursor: args[:after].present? ? 
Base64.urlsafe_decode64(args[:after]) : [], size: args[:first] }) + Doi.gql_query( + args[:query], + funder_id: args[:funder_id], + user_id: args[:user_id], + client_id: object.uid, + provider_id: args[:member_id], + affiliation_id: args[:affiliation_id], + organization_id: args[:organization_id], + resource_type_id: args[:resource_type_id], + resource_type: args[:resource_type], + agency: args[:registration_agency], + language: args[:language], + license: args[:license], + has_person: args[:has_person], + has_organization: args[:has_organization], + has_affiliation: args[:has_affiliation], + has_member: args[:has_member], + has_funder: args[:has_funder], + has_citations: args[:has_citations], + has_parts: args[:has_parts], + has_versions: args[:has_versions], + has_views: args[:has_views], + has_downloads: args[:has_downloads], + field_of_science: args[:field_of_science], + published: args[:published], + state: "findable", + page: { + cursor: + args[:after].present? ? Base64.urlsafe_decode64(args[:after]) : [], + size: args[:first], + }, + ) end end diff --git a/app/graphql/types/resource_type_type.rb b/app/graphql/types/resource_type_type.rb index c902509ad..b1324dde5 100644 --- a/app/graphql/types/resource_type_type.rb +++ b/app/graphql/types/resource_type_type.rb @@ -6,7 +6,15 @@ class ResourceTypeType < BaseObject field :ris, String, null: true, description: "RIS" field :bibtex, String, null: true, hash_key: "bibtex", description: "BibTex" field :citeproc, String, null: true, description: "Citeproc" - field :schema_org, String, null: true, hash_key: "schemaOrg", description: "Schema.org" - field :resource_type, String, null: true, hash_key: "resourceType", description: "Resource type" - field :resource_type_general, String, null: true, hash_key: "resourceTypeGeneral", description: "Resource type general" + field :schema_org, + String, + null: true, hash_key: "schemaOrg", description: "Schema.org" + field :resource_type, + String, + null: true, hash_key: 
"resourceType", description: "Resource type" + field :resource_type_general, + String, + null: true, + hash_key: "resourceTypeGeneral", + description: "Resource type general" end diff --git a/app/graphql/types/rights_type.rb b/app/graphql/types/rights_type.rb index 9fb43ba58..3c3b541ee 100644 --- a/app/graphql/types/rights_type.rb +++ b/app/graphql/types/rights_type.rb @@ -3,10 +3,28 @@ class RightsType < BaseObject description "Information about rights" - field :rights, String, null: true, description: "Any rights information for this resource." - field :rights_uri, String, null: true, hash_key: "rightsUri", description: "The URI of the license." - field :rights_identifier, String, null: true, hash_key: "rightsIdentifier", description: "A short, standardized version of the license name." - field :rights_identifier_scheme, String, null: true, hash_key: "rightsIdentifierScheme", description: "The name of the scheme." - field :scheme_uri, String, null: true, hash_key: "schemeUri", description: "The URI of the rightsIdentifierScheme." + field :rights, + String, + null: true, description: "Any rights information for this resource." + field :rights_uri, + String, + null: true, + hash_key: "rightsUri", + description: "The URI of the license." + field :rights_identifier, + String, + null: true, + hash_key: "rightsIdentifier", + description: "A short, standardized version of the license name." + field :rights_identifier_scheme, + String, + null: true, + hash_key: "rightsIdentifierScheme", + description: "The name of the scheme." + field :scheme_uri, + String, + null: true, + hash_key: "schemeUri", + description: "The URI of the rightsIdentifierScheme." 
field :lang, String, null: true, description: "Language" end diff --git a/app/graphql/types/service_connection_with_total_type.rb b/app/graphql/types/service_connection_with_total_type.rb index 5d9259600..00fe39fdc 100644 --- a/app/graphql/types/service_connection_with_total_type.rb +++ b/app/graphql/types/service_connection_with_total_type.rb @@ -23,7 +23,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/software_application_type.rb b/app/graphql/types/software_application_type.rb index c089d4bed..eec5195ad 100644 --- a/app/graphql/types/software_application_type.rb +++ b/app/graphql/types/software_application_type.rb @@ -4,7 +4,11 @@ class SoftwareApplicationType < BaseObject description "A software application." field :name, String, null: true, description: "The name of the item." - field :description, String, null: true, description: "A description of the item." - field :software_version, String, null: true, description: "Version of the software instance." + field :description, + String, + null: true, description: "A description of the item." + field :software_version, + String, + null: true, description: "Version of the software instance." field :url, String, null: true, description: "URL of the item." 
end diff --git a/app/graphql/types/software_connection_with_total_type.rb b/app/graphql/types/software_connection_with_total_type.rb index 984ec445a..2fe83ff3d 100644 --- a/app/graphql/types/software_connection_with_total_type.rb +++ b/app/graphql/types/software_connection_with_total_type.rb @@ -29,7 +29,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories @@ -53,26 +55,59 @@ def fields_of_science end def software_connection_count - Event.query(nil, citation_type: "SoftwareSourceCode-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "SoftwareSourceCode-SoftwareSourceCode", + page: { number: 1, size: 0 }, + ). + results. + total end def publication_connection_count - Event.query(nil, citation_type: "ScholarlyArticle-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "ScholarlyArticle-SoftwareSourceCode", + page: { number: 1, size: 0 }, + ). + results. + total end def dataset_connection_count - Event.query(nil, citation_type: "Dataset-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Dataset-SoftwareSourceCode", page: { number: 1, size: 0 }, + ). + results. + total end def person_connection_count - Event.query(nil, citation_type: "Person-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Person-SoftwareSourceCode", page: { number: 1, size: 0 }, + ). + results. + total end def funder_connection_count - Event.query(nil, citation_type: "Funder-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Funder-SoftwareSourceCode", page: { number: 1, size: 0 }, + ). + results. 
+ total end def organization_connection_count - Event.query(nil, citation_type: "Organization-SoftwareSourceCode", page: { number: 1, size: 0 }).results.total + Event.query( + nil, + citation_type: "Organization-SoftwareSourceCode", + page: { number: 1, size: 0 }, + ). + results. + total end end diff --git a/app/graphql/types/sound_connection_with_total_type.rb b/app/graphql/types/sound_connection_with_total_type.rb index 8cc294564..c42394a59 100644 --- a/app/graphql/types/sound_connection_with_total_type.rb +++ b/app/graphql/types/sound_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/subject_type.rb b/app/graphql/types/subject_type.rb index 5d04d3f4a..8f7d11fcd 100644 --- a/app/graphql/types/subject_type.rb +++ b/app/graphql/types/subject_type.rb @@ -3,9 +3,26 @@ class SubjectType < BaseObject description "Subject information" - field :subject, String, null: true, description: "Subject, keyword, classification code, or key phrase describing the resource" - field :subject_scheme, String, null: true, hash_key: "subjectScheme", description: "The name of the subject scheme or classification code or authority if one is used" - field :scheme_uri, String, null: true, hash_key: "schemeUri", description: "The URI of the subject identifier scheme" - field :value_uri, String, null: true, hash_key: "valueUri", description: "The URI of the subject term" + field :subject, + String, + null: true, + description: + "Subject, keyword, classification code, or key phrase describing the resource" + field :subject_scheme, + String, + null: true, + hash_key: "subjectScheme", + description: + "The name of the subject scheme or classification code or authority if one is used" + field :scheme_uri, + String, + null: true, + 
hash_key: "schemeUri", + description: "The URI of the subject identifier scheme" + field :value_uri, + String, + null: true, + hash_key: "valueUri", + description: "The URI of the subject term" field :lang, ID, null: true, description: "Language" end diff --git a/app/graphql/types/title_type.rb b/app/graphql/types/title_type.rb index 81fac5956..cb45c311f 100644 --- a/app/graphql/types/title_type.rb +++ b/app/graphql/types/title_type.rb @@ -4,6 +4,8 @@ class TitleType < BaseObject description "Information about titles" field :title, String, null: true, description: "Title" - field :title_type, String, null: true, hash_key: "titleType", description: "Title type" + field :title_type, + String, + null: true, hash_key: "titleType", description: "Title type" field :lang, ID, null: true, description: "Language" end diff --git a/app/graphql/types/usage_report_type.rb b/app/graphql/types/usage_report_type.rb index 86e102a31..a9bb25387 100644 --- a/app/graphql/types/usage_report_type.rb +++ b/app/graphql/types/usage_report_type.rb @@ -4,24 +4,36 @@ class UsageReportType < BaseObject description "Information about usage reports" field :id, ID, null: false, description: "Usage report ID" - field :repository_id, String, null: true, description: "Repository that created the report" - field :reporting_period, ReportingPeriodType, null: false, description: "Time period covered by the report" - field :date_created, String, null: false, description: "Date information was created" - field :datasets, UsageReportDatasetConnectionWithTotalType, null: false, description: "Datasets included in usage report", connection: true do + field :repository_id, + String, + null: true, description: "Repository that created the report" + field :reporting_period, + ReportingPeriodType, + null: false, description: "Time period covered by the report" + field :date_created, + String, + null: false, description: "Date information was created" + field :datasets, + UsageReportDatasetConnectionWithTotalType, 
+ null: false, + description: "Datasets included in usage report", + connection: true do argument :first, Int, required: false, default_value: 25 end def datasets(**_args) - ids = Event.query(nil, subj_id: object[:id]).results.to_a.map do |e| - doi_from_url(e[:obj_id]) - end + ids = + Event.query(nil, subj_id: object[:id]).results.to_a.map do |e| + doi_from_url(e[:obj_id]) + end ElasticsearchLoader.for(Doi).load_many(ids) end def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end end diff --git a/app/graphql/types/work_connection_with_total_type.rb b/app/graphql/types/work_connection_with_total_type.rb index 3ef7a3d2d..5eba7cd99 100644 --- a/app/graphql/types/work_connection_with_total_type.rb +++ b/app/graphql/types/work_connection_with_total_type.rb @@ -5,7 +5,8 @@ class WorkConnectionWithTotalType < BaseConnection field_class GraphQL::Cache::Field field :total_count, Integer, null: false, cache: true - field :totalCountFromCrossref, resolver: TotalCountFromCrossref, null: true, cache: true + field :totalCountFromCrossref, + resolver: TotalCountFromCrossref, null: true, cache: true field :published, [FacetType], null: true, cache: true field :resource_types, [FacetType], null: true, cache: true field :registration_agencies, [FacetType], null: true, cache: true @@ -28,7 +29,9 @@ def resource_types end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/workflow_connection_with_total_type.rb b/app/graphql/types/workflow_connection_with_total_type.rb index 
7c4cf695f..28c727394 100644 --- a/app/graphql/types/workflow_connection_with_total_type.rb +++ b/app/graphql/types/workflow_connection_with_total_type.rb @@ -21,7 +21,9 @@ def published end def registration_agencies - facet_by_registration_agency(object.aggregations.registration_agencies.buckets) + facet_by_registration_agency( + object.aggregations.registration_agencies.buckets, + ) end def repositories diff --git a/app/graphql/types/year_month_total_type.rb b/app/graphql/types/year_month_total_type.rb index ddba9a478..c045351f6 100644 --- a/app/graphql/types/year_month_total_type.rb +++ b/app/graphql/types/year_month_total_type.rb @@ -3,6 +3,8 @@ class YearMonthTotalType < BaseObject description "Information about totals over time (years)" - field :year_month, String, null: true, hash_key: :yearMonth, description: "Year-month" + field :year_month, + String, + null: true, hash_key: :yearMonth, description: "Year-month" field :total, Int, null: true, description: "Total" end diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb index de6be7945..29b57cbc1 100644 --- a/app/helpers/application_helper.rb +++ b/app/helpers/application_helper.rb @@ -1,2 +1,3 @@ -module ApplicationHelper -end +# frozen_string_literal: true + +module ApplicationHelper; end diff --git a/app/jobs/activity_convert_affiliation_by_id_job.rb b/app/jobs/activity_convert_affiliation_by_id_job.rb index c67598d06..04e34626f 100644 --- a/app/jobs/activity_convert_affiliation_by_id_job.rb +++ b/app/jobs/activity_convert_affiliation_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class ActivityConvertAffiliationByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git 
a/app/jobs/activity_import_by_id_job.rb b/app/jobs/activity_import_by_id_job.rb index 21f9588b9..de145e19e 100644 --- a/app/jobs/activity_import_by_id_job.rb +++ b/app/jobs/activity_import_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class ActivityImportByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/affiliation_job.rb b/app/jobs/affiliation_job.rb index f8cff4ad0..5de92542b 100644 --- a/app/jobs/affiliation_job.rb +++ b/app/jobs/affiliation_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class AffiliationJob < ApplicationJob queue_as :lupo_background @@ -5,19 +7,26 @@ def perform(doi_id) doi = Doi.where(doi: doi_id).first if doi.present? - new_creators = Array.wrap(doi.creators).map do |c| - c["affiliation"] = { "name" => c["affiliation"] } if c["affiliation"].is_a?(String) - c - end - new_contributors = Array.wrap(doi.contributors).map do |c| - c["affiliation"] = { "name" => c["affiliation"] } if c["affiliation"].is_a?(String) - c - end + new_creators = + Array.wrap(doi.creators).map do |c| + if c["affiliation"].is_a?(String) + c["affiliation"] = { "name" => c["affiliation"] } + end + c + end + new_contributors = + Array.wrap(doi.contributors).map do |c| + if c["affiliation"].is_a?(String) + c["affiliation"] = { "name" => c["affiliation"] } + end + c + end doi.update(creators: new_creators, contributors: new_contributors) doi.__elasticsearch__.index_document else - Rails.logger.error "[Affiliation] Error updating DOI " + doi_id + ": not found" + Rails.logger.error "[Affiliation] Error updating DOI " + doi_id + + ": not found" end end end diff --git a/app/jobs/application_job.rb b/app/jobs/application_job.rb index a009ace51..82047f33b 100644 --- 
a/app/jobs/application_job.rb +++ b/app/jobs/application_job.rb @@ -1,2 +1,3 @@ -class ApplicationJob < ActiveJob::Base -end +# frozen_string_literal: true + +class ApplicationJob < ActiveJob::Base; end diff --git a/app/jobs/camelcase_nested_objects_by_id_job.rb b/app/jobs/camelcase_nested_objects_by_id_job.rb index 2c5b1ce71..94643dfb1 100644 --- a/app/jobs/camelcase_nested_objects_by_id_job.rb +++ b/app/jobs/camelcase_nested_objects_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class CamelcaseNestedObjectsByIdJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/datacite_doi_import_by_id_job.rb b/app/jobs/datacite_doi_import_by_id_job.rb index 4e2794ed8..3c82b1077 100644 --- a/app/jobs/datacite_doi_import_by_id_job.rb +++ b/app/jobs/datacite_doi_import_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class DataciteDoiImportByIdJob < ApplicationJob queue_as :lupo_import - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/delete_event_by_attribute_job.rb b/app/jobs/delete_event_by_attribute_job.rb index 7cf1fa6d8..acd8d523a 100644 --- a/app/jobs/delete_event_by_attribute_job.rb +++ b/app/jobs/delete_event_by_attribute_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DeleteEventByAttributeJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/delete_job.rb b/app/jobs/delete_job.rb index 02edf86eb..8cacb6273 100644 --- a/app/jobs/delete_job.rb +++ b/app/jobs/delete_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DeleteJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/doi_convert_affiliation_by_id_job.rb b/app/jobs/doi_convert_affiliation_by_id_job.rb index 328fa49e9..818cef51c 100644 --- 
a/app/jobs/doi_convert_affiliation_by_id_job.rb +++ b/app/jobs/doi_convert_affiliation_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class DoiConvertAffiliationByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/doi_convert_container_by_id_job.rb b/app/jobs/doi_convert_container_by_id_job.rb index e5b4a1c18..796937465 100644 --- a/app/jobs/doi_convert_container_by_id_job.rb +++ b/app/jobs/doi_convert_container_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class DoiConvertContainerByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/doi_job.rb b/app/jobs/doi_job.rb index 7c944fcdb..9d482ca3a 100644 --- a/app/jobs/doi_job.rb +++ b/app/jobs/doi_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DoiJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/doi_refresh_job.rb b/app/jobs/doi_refresh_job.rb index 7ab14ecd7..f955bf0a7 100644 --- a/app/jobs/doi_refresh_job.rb +++ b/app/jobs/doi_refresh_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DoiRefreshJob < ApplicationJob queue_as :lupo_background @@ -6,7 +8,8 @@ class DoiRefreshJob < ApplicationJob # discard_on ActiveJob::DeserializationError - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| 
Rails.logger.error error.message end diff --git a/app/jobs/event_import_by_id_job.rb b/app/jobs/event_import_by_id_job.rb index 74a3030cb..32d58155d 100644 --- a/app/jobs/event_import_by_id_job.rb +++ b/app/jobs/event_import_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class EventImportByIdJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/event_registrant_update_by_id_job.rb b/app/jobs/event_registrant_update_by_id_job.rb index 52fa2e396..89b649a33 100644 --- a/app/jobs/event_registrant_update_by_id_job.rb +++ b/app/jobs/event_registrant_update_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class EventRegistrantUpdateByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end @@ -11,41 +14,60 @@ def perform(id, _options = {}) case item.source_id when "datacite-crossref" - registrant_id = cached_get_crossref_member_id(item.obj_id) if cached_get_doi_ra(item.obj_id) == "Crossref" + if cached_get_doi_ra(item.obj_id) == "Crossref" + registrant_id = cached_get_crossref_member_id(item.obj_id) + end Rails.logger.info registrant_id if registrant_id == "crossref.citations" sleep(0.50) registrant_id = get_crossref_member_id(item.obj_id) end - obj = item.obj.merge("registrantId" => registrant_id) unless registrant_id.nil? + unless registrant_id.nil? + obj = item.obj.merge("registrantId" => registrant_id) + end Rails.logger.info obj.inspect item.update(obj: obj) if obj.present? 
when "crossref" - registrant_id = cached_get_crossref_member_id(item.subj_id) if cached_get_doi_ra(item.subj_id) == "Crossref" + if cached_get_doi_ra(item.subj_id) == "Crossref" + registrant_id = cached_get_crossref_member_id(item.subj_id) + end Rails.logger.info registrant_id if registrant_id == "crossref.citations" sleep(0.50) registrant_id = get_crossref_member_id(item.subj_id) end - subj = item.subj.merge("registrant_id" => registrant_id) unless registrant_id.nil? + unless registrant_id.nil? + subj = item.subj.merge("registrant_id" => registrant_id) + end Rails.logger.info subj.inspect item.update(subj: subj) if subj.present? end - Rails.logger.error item.errors.full_messages.map { |message| { title: message } } if item.errors.any? - Rails.logger.info "#{item.uuid} Updated" if item.errors.blank? && registrant_id + if item.errors.any? + Rails.logger.error item.errors.full_messages.map { |message| + { title: message } + } + end + if item.errors.blank? && registrant_id + Rails.logger.info "#{item.uuid} Updated" + end end def get_crossref_member_id(id, _options = {}) doi = doi_from_url(id) # return "crossref.citations" unless doi.present? 
- url = "https://api.crossref.org/works/#{Addressable::URI.encode(doi)}?mailto=info@datacite.org" + url = + "https://api.crossref.org/works/#{ + Addressable::URI.encode(doi) + }?mailto=info@datacite.org" sleep(0.24) # to avoid crossref rate limitting - response = Maremma.get(url, host: true) - Rails.logger.info "[Crossref Response] [#{response.status}] for DOI #{doi} metadata" + response = Maremma.get(url, host: true) + Rails.logger.info "[Crossref Response] [#{response.status}] for DOI #{ + doi + } metadata" return "" if response.status == 404 ### for cases when DOI is not in the crossreaf api return "crossref.citations" if response.status != 200 ### for cases any other errors @@ -55,15 +77,11 @@ def get_crossref_member_id(id, _options = {}) end def cached_get_doi_ra(doi) - Rails.cache.fetch("ras/#{doi}") do - get_doi_ra(doi) - end + Rails.cache.fetch("ras/#{doi}") { get_doi_ra(doi) } end def cached_get_crossref_member_id(doi) - Rails.cache.fetch("members_ids/#{doi}") do - get_crossref_member_id(doi) - end + Rails.cache.fetch("members_ids/#{doi}") { get_crossref_member_id(doi) } end def get_doi_ra(doi) @@ -77,13 +95,18 @@ def get_doi_ra(doi) end def validate_prefix(doi) - Array(/\A(?:(http|https):\/(\/)?(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}).*\z/.match(doi)).last + Array( + %r{\A(?:(http|https):/(/)?(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}).*\z}. + match(doi), + ). + last end def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. 
+ match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end end diff --git a/app/jobs/event_registrant_update_job.rb b/app/jobs/event_registrant_update_job.rb index a5b290da1..b6e87182f 100644 --- a/app/jobs/event_registrant_update_job.rb +++ b/app/jobs/event_registrant_update_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class EventRegistrantUpdateJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/handle_job.rb b/app/jobs/handle_job.rb index 1c6689df0..a92f091ac 100644 --- a/app/jobs/handle_job.rb +++ b/app/jobs/handle_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class HandleJob < ApplicationJob queue_as :lupo @@ -12,7 +14,8 @@ def perform(doi_id) if doi.present? doi.register_url else - Rails.logger.info "[Handle] Error updating URL for DOI " + doi_id + ": not found." + Rails.logger.info "[Handle] Error updating URL for DOI " + doi_id + + ": not found." end end end diff --git a/app/jobs/import_doi_job.rb b/app/jobs/import_doi_job.rb index 6a4b9bca1..4c987fd92 100644 --- a/app/jobs/import_doi_job.rb +++ b/app/jobs/import_doi_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ImportDoiJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/index_background_job.rb b/app/jobs/index_background_job.rb index 2a5ae6e34..90aa9447a 100644 --- a/app/jobs/index_background_job.rb +++ b/app/jobs/index_background_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class IndexBackgroundJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/index_job.rb b/app/jobs/index_job.rb index e7649c755..8fcdeec37 100644 --- a/app/jobs/index_job.rb +++ b/app/jobs/index_job.rb @@ 
-1,7 +1,10 @@ +# frozen_string_literal: true + class IndexJob < ApplicationJob queue_as :lupo - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/loop_through_dois_job.rb b/app/jobs/loop_through_dois_job.rb index caa866263..afe994ed0 100644 --- a/app/jobs/loop_through_dois_job.rb +++ b/app/jobs/loop_through_dois_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class LoopThroughDoisJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/orcid_auto_update_by_id_job.rb b/app/jobs/orcid_auto_update_by_id_job.rb index 3d48c4eb7..36e4e3bb0 100644 --- a/app/jobs/orcid_auto_update_by_id_job.rb +++ b/app/jobs/orcid_auto_update_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OrcidAutoUpdateByIdJob < ApplicationJob queue_as :lupo_background @@ -30,43 +32,42 @@ def perform(id, options = {}) message = response.body.fetch("data", {}) attributes = parse_message(message: message) - data = { - "data" => { - "type" => "researchers", - "attributes" => attributes, - }, - } + data = { "data" => { "type" => "researchers", "attributes" => attributes } } url = "http://localhost/researchers/#{orcid}" - response = Maremma.put(url, accept: "application/vnd.api+json", - content_type: "application/vnd.api+json", - data: data.to_json, - username: ENV["ADMIN_USERNAME"], - password: ENV["ADMIN_PASSWORD"]) + response = + Maremma.put( + url, + accept: "application/vnd.api+json", + content_type: "application/vnd.api+json", + data: data.to_json, + username: ENV["ADMIN_USERNAME"], + password: ENV["ADMIN_PASSWORD"], + ) if [200, 201].include?(response.status) Rails.logger.info "ORCID #{orcid} added." 
else - Rails.logger.error "[Error for ORCID #{orcid}]: " + response.body["errors"].inspect + Rails.logger.error "[Error for ORCID #{orcid}]: " + + response.body["errors"].inspect end end def parse_message(message: nil) given_name = message.dig("name", "given-names", "value") family_name = message.dig("name", "family-name", "value") - name = if message.dig("name", "credit-name", "value").present? - message.dig("name", "credit-name", "value") - elsif given_name.present? || family_name.present? - [given_name, family_name].join(" ") - end + name = + if message.dig("name", "credit-name", "value").present? + message.dig("name", "credit-name", "value") + elsif given_name.present? || family_name.present? + [given_name, family_name].join(" ") + end { - "name" => name, - "givenName" => given_names, - "familyName" => family_name, + "name" => name, "givenName" => given_name, "familyName" => family_name }.compact end def orcid_from_url(url) - Array(/\A(http|https):\/\/orcid\.org\/(.+)/.match(url)).last + Array(%r{\A(http|https)://orcid\.org/(.+)}.match(url)).last end end diff --git a/app/jobs/orcid_auto_update_job.rb b/app/jobs/orcid_auto_update_job.rb index 4436aa5b4..87b00a43b 100644 --- a/app/jobs/orcid_auto_update_job.rb +++ b/app/jobs/orcid_auto_update_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OrcidAutoUpdateJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/other_doi_by_id_job.rb b/app/jobs/other_doi_by_id_job.rb index f3f9ebc00..218b9442c 100644 --- a/app/jobs/other_doi_by_id_job.rb +++ b/app/jobs/other_doi_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OtherDoiByIdJob < ApplicationJob queue_as :lupo_background @@ -6,7 +8,8 @@ class OtherDoiByIdJob < ApplicationJob # discard_on ActiveJob::DeserializationError - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, +
Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/other_doi_import_by_id_job.rb b/app/jobs/other_doi_import_by_id_job.rb index ca5d8c8e4..31cb0ca81 100644 --- a/app/jobs/other_doi_import_by_id_job.rb +++ b/app/jobs/other_doi_import_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class OtherDoiImportByIdJob < ApplicationJob queue_as :lupo_import_other_doi - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/other_doi_job.rb b/app/jobs/other_doi_job.rb index edcb366e6..f990dac34 100644 --- a/app/jobs/other_doi_job.rb +++ b/app/jobs/other_doi_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OtherDoiJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/other_doi_refresh_job.rb b/app/jobs/other_doi_refresh_job.rb index e24a4baf4..21070b71d 100644 --- a/app/jobs/other_doi_refresh_job.rb +++ b/app/jobs/other_doi_refresh_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OtherDoiRefreshJob < ApplicationJob queue_as :lupo_background @@ -6,7 +8,8 @@ class OtherDoiRefreshJob < ApplicationJob # discard_on ActiveJob::DeserializationError - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git a/app/jobs/schema_version_job.rb b/app/jobs/schema_version_job.rb index d2de451d3..51ecdf2c8 100644 --- a/app/jobs/schema_version_job.rb +++ b/app/jobs/schema_version_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class SchemaVersionJob < ApplicationJob queue_as :lupo_background @@ -9,11 +11,20 @@ def 
perform(id, _options = {}) metadata = xml.present? ? parse_xml(xml, doi: id) : {} if doi.blank? || metadata["schema_version"].blank? - Rails.logger.error "[SchemaVersion] Error updating schema_version for DOI " + id + ": not found" + Rails. + logger.error "[SchemaVersion] Error updating schema_version for DOI " + + id + + ": not found" elsif doi.update(schema_version: metadata["schema_version"]) - Rails.logger.info "[SchemaVersion] Successfully updated schema_version for DOI " + id + Rails. + logger.info "[SchemaVersion] Successfully updated schema_version for DOI " + + id else - Rails.logger.error "[SchemaVersion] Error updating schema_version for DOI " + id + ": " + errors.inspect + Rails. + logger.error "[SchemaVersion] Error updating schema_version for DOI " + + id + + ": " + + errors.inspect end end end diff --git a/app/jobs/subj_check_by_id_job.rb b/app/jobs/subj_check_by_id_job.rb index 320729604..38024995b 100644 --- a/app/jobs/subj_check_by_id_job.rb +++ b/app/jobs/subj_check_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class SubjCheckByIdJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/subj_check_job.rb b/app/jobs/subj_check_job.rb index 4c419e54a..be75c9e19 100644 --- a/app/jobs/subj_check_job.rb +++ b/app/jobs/subj_check_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class SubjCheckJob < ApplicationJob queue_as :lupo_background diff --git a/app/jobs/target_doi_by_id_job.rb b/app/jobs/target_doi_by_id_job.rb index 88e5070b7..0406a0dc8 100644 --- a/app/jobs/target_doi_by_id_job.rb +++ b/app/jobs/target_doi_by_id_job.rb @@ -1,7 +1,10 @@ +# frozen_string_literal: true + class TargetDoiByIdJob < ApplicationJob queue_as :lupo_background - rescue_from ActiveJob::DeserializationError, Elasticsearch::Transport::Transport::Errors::BadRequest do |error| + rescue_from ActiveJob::DeserializationError, + Elasticsearch::Transport::Transport::Errors::BadRequest do |error| Rails.logger.error error.message end diff --git 
a/app/jobs/transfer_client_job.rb b/app/jobs/transfer_client_job.rb index 0856933a8..184727b17 100644 --- a/app/jobs/transfer_client_job.rb +++ b/app/jobs/transfer_client_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class TransferClientJob < ApplicationJob queue_as :lupo_background @@ -14,9 +16,12 @@ def perform(client, options = {}) Doi.loop_through_dois(options) - Rails.logger.info "[Transfer] DOIs updating has started for #{symbol} to #{options[:provider_target_id]}." + Rails.logger.info "[Transfer] DOIs updating has started for #{ + symbol + } to #{options[:provider_target_id]}." else - Rails.logger.error "[Transfer] Error updating DOIs " + symbol + ": not found" + Rails.logger.error "[Transfer] Error updating DOIs " + symbol + + ": not found" end end end diff --git a/app/jobs/transfer_job.rb b/app/jobs/transfer_job.rb index b2b370858..766893828 100644 --- a/app/jobs/transfer_job.rb +++ b/app/jobs/transfer_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class TransferJob < ApplicationJob queue_as :lupo_transfer @@ -9,20 +11,21 @@ class TransferJob < ApplicationJob def perform(doi_id, options = {}) doi = Doi.where(doi: doi_id).first + # Success starts as true because update_attributes only returns false on error. + if doi.present? && options[:client_target_id].present? - # Success starts as true because update_attributes only returns false on error. success = true success = doi.update(datacentre: options[:client_target_id]) - if success - __elasticsearch__.index_document - end + __elasticsearch__.index_document if success Rails.logger.info "[Transfer] Transferred DOI #{doi.doi}." elsif doi.present? 
- Rails.logger.error "[Transfer] Error transferring DOI " + doi_id + ": no target client" + Rails.logger.error "[Transfer] Error transferring DOI " + doi_id + + ": no target client" else - Rails.logger.error "[Transfer] Error transferring DOI " + doi_id + ": not found" + Rails.logger.error "[Transfer] Error transferring DOI " + doi_id + + ": not found" end end end diff --git a/app/jobs/update_doi_job.rb b/app/jobs/update_doi_job.rb index 4e3511125..cb8fd5f6d 100644 --- a/app/jobs/update_doi_job.rb +++ b/app/jobs/update_doi_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class UpdateDoiJob < ApplicationJob queue_as :lupo_background @@ -5,11 +7,13 @@ def perform(doi_id, _options = {}) doi = Doi.where(doi: doi_id).first if doi.blank? - Rails.logger.error "[UpdateDoi] Error updating DOI " + doi_id + ": not found" + Rails.logger.error "[UpdateDoi] Error updating DOI " + doi_id + + ": not found" elsif doi.update(version: doi.version.to_i + 1) Rails.logger.debug "[UpdateDoi] Successfully updated DOI " + doi_id else - Rails.logger.error "[UpdateDoi] Error updating DOI " + doi_id + ": " + doi.errors.messages.inspect + Rails.logger.error "[UpdateDoi] Error updating DOI " + doi_id + ": " + + doi.errors.messages.inspect end end end diff --git a/app/jobs/update_provider_id_job.rb b/app/jobs/update_provider_id_job.rb index fcc18d6c3..15b221461 100644 --- a/app/jobs/update_provider_id_job.rb +++ b/app/jobs/update_provider_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class UpdateProviderIdJob < ApplicationJob queue_as :lupo_transfer @@ -14,9 +16,11 @@ def perform(doi_id, options = {}) Rails.logger.warn "[Transfer] updated DOI #{doi.doi}." elsif doi.present? 
- Rails.logger.error "[Transfer] Error updateding DOI " + doi_id + ": no target client" + Rails.logger.error "[Transfer] Error updating DOI " + doi_id + + ": no target client" else - Rails.logger.error "[Transfer] Error updateding DOI " + doi_id + ": not found" + Rails.logger.error "[Transfer] Error updating DOI " + doi_id + + ": not found" end end end diff --git a/app/jobs/update_state_job.rb b/app/jobs/update_state_job.rb index a5acd380e..f7cca4afa 100644 --- a/app/jobs/update_state_job.rb +++ b/app/jobs/update_state_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class UpdateStateJob < ApplicationJob queue_as :lupo_background @@ -5,11 +7,14 @@ def perform(doi_id, options = {}) doi = Doi.where(doi: doi_id).first if doi.blank? - Rails.logger.error "[State] Error updating state for DOI " + doi_id + ": not found" + Rails.logger.error "[State] Error updating state for DOI " + doi_id + + ": not found" elsif doi.update(aasm_state: options[:state]) Rails.logger.info "[State] Successfully updated state for DOI " + doi_id else - Rails.logger.error "[State] Error updating state for DOI " + doi_id + ": " + errors.inspect + Rails.logger.error "[State] Error updating state for DOI " + doi_id + + ": " + + errors.inspect end end end diff --git a/app/jobs/url_job.rb b/app/jobs/url_job.rb index 483bc42b1..75a3272a6 100644 --- a/app/jobs/url_job.rb +++ b/app/jobs/url_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class UrlJob < ApplicationJob queue_as :lupo @@ -11,9 +13,18 @@ def perform(doi_id) if doi.present? response = Doi.get_doi(doi: doi.doi, agency: doi.agency) - url = response.is_a?(String) ? nil : response.body.dig("data", "values", 0, "data", "value") + url = + if response.is_a?(String) + nil + else + response.body.dig("data", "values", 0, "data", "value") + end + if url.present? - if (doi.is_registered_or_findable? || %w(europ).include?(doi.provider_id)) && doi.minted.blank? + if ( + doi.is_registered_or_findable?
|| %w[europ].include?(doi.provider_id) + ) && + doi.minted.blank? doi.update(url: url, minted: Time.zone.now) else doi.update(url: url) @@ -23,12 +34,22 @@ def perform(doi_id) doi.__elasticsearch__.index_document - Rails.logger.info "[Handle] URL #{url} set for DOI #{doi.doi}." unless Rails.env.test? + unless Rails.env.test? + Rails.logger.info "[Handle] URL #{url} set for DOI #{doi.doi}." + end else - Rails.logger.info "[Handle] Error updating URL for DOI #{doi.doi}: URL not found." unless Rails.env.test? + unless Rails.env.test? + Rails.logger.info "[Handle] Error updating URL for DOI #{ + doi.doi + }: URL not found." + end end else - Rails.logger.info "[Handle] Error updating URL for DOI #{doi_id}: DOI not found" unless Rails.env.test? + unless Rails.env.test? + Rails.logger.info "[Handle] Error updating URL for DOI #{ + doi_id + }: DOI not found" + end end end end diff --git a/app/models/ability.rb b/app/models/ability.rb index ef4ee6d1c..96e859f4c 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Ability include CanCan::Ability @@ -11,8 +13,12 @@ def initialize(user) if user.role_id == "staff_admin" can :manage, :all - cannot [:new, :create], Doi do |doi| - doi.client.blank? || !(doi.client.prefixes.where(uid: doi.prefix).first || doi.type == "OtherDoi") + cannot %i[new create], Doi do |doi| + doi.client.blank? || + !( + doi.client.prefixes.where(uid: doi.prefix).first || + doi.type == "OtherDoi" + ) end can :export, :contacts can :export, :organizations @@ -20,17 +26,21 @@ def initialize(user) elsif user.role_id == "staff_user" can :read, :all elsif user.role_id == "consortium_admin" && user.provider_id.present? 
- can [:manage, :read_billing_information], Provider do |provider| + can %i[manage read_billing_information], Provider do |provider| user.provider_id.casecmp(provider.consortium_id) end - can %i[update read read_billing_information], Provider, symbol: user.provider_id.upcase - can [:manage], ProviderPrefix do |provider_prefix| - provider_prefix.provider && user.provider_id.casecmp(provider_prefix.provider.consortium_id) + can %i[update read read_billing_information], + Provider, + symbol: user.provider_id.upcase + can %i[manage], ProviderPrefix do |provider_prefix| + provider_prefix.provider && + user.provider_id.casecmp(provider_prefix.provider.consortium_id) end - can [:manage, :transfer], Client do |client| - client.provider && user.provider_id.casecmp(client.provider.consortium_id) + can %i[manage transfer], Client do |client| + client.provider && + user.provider_id.casecmp(client.provider.consortium_id) end - can [:manage], ClientPrefix # , :client_id => user.provider_id + can %i[manage], ClientPrefix # , :client_id => user.provider_id # if Flipper[:delete_doi].enabled?(user) # can [:manage], Doi, :provider_id => user.provider_id @@ -38,21 +48,25 @@ def initialize(user) # can [:read, :update], Doi, :provider_id => user.provider_id # end - can [:read, :get_url, :transfer, :read_landing_page_results], Doi do |doi| + can %i[read get_url transfer read_landing_page_results], Doi do |doi| user.provider_id.casecmp(doi.provider.consortium_id) end - can [:read], Doi - can [:read], User - can [:read], Phrase - can [:read], Activity do |activity| - activity.doi.findable? || activity.doi.provider && user.provider_id.casecmp(activity.doi.provider.consortium_id) + can %i[read], Doi + can %i[read], User + can %i[read], Phrase + can %i[read], Activity do |activity| + activity.doi.findable? || + activity.doi.provider && + user.provider_id.casecmp(activity.doi.provider.consortium_id) end elsif user.role_id == "provider_admin" && user.provider_id.present? 
- can %i[update read read_billing_information], Provider, symbol: user.provider_id.upcase - can [:manage], ProviderPrefix, provider_id: user.provider_id - can [:manage], Client, provider_id: user.provider_id - cannot [:transfer], Client - can [:manage], ClientPrefix # , :client_id => user.provider_id + can %i[update read read_billing_information], + Provider, + symbol: user.provider_id.upcase + can %i[manage], ProviderPrefix, provider_id: user.provider_id + can %i[manage], Client, provider_id: user.provider_id + cannot %i[transfer], Client + can %i[manage], ClientPrefix # , :client_id => user.provider_id # if Flipper[:delete_doi].enabled?(user) # can [:manage], Doi, :provider_id => user.provider_id @@ -60,30 +74,36 @@ def initialize(user) # can [:read, :update], Doi, :provider_id => user.provider_id # end - can %i[read get_url transfer read_landing_page_results], Doi, provider_id: user.provider_id - can [:read], Doi - can [:read], User - can [:read], Phrase - can [:read], Activity do |activity| + can %i[read get_url transfer read_landing_page_results], + Doi, + provider_id: user.provider_id + can %i[read], Doi + can %i[read], User + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? || activity.doi.provider_id == user.provider_id end elsif user.role_id == "provider_user" && user.provider_id.present? 
- can %i[read read_billing_information], Provider, symbol: user.provider_id.upcase - can [:read], Provider - can [:read], ProviderPrefix, provider_id: user.provider_id - can [:read], Client, provider_id: user.provider_id - can [:read], ClientPrefix # , :client_id => user.client_id - can %i[read get_url read_landing_page_results], Doi, provider_id: user.provider_id - can [:read], Doi - can [:read], User - can [:read], Phrase - can [:read], Activity do |activity| + can %i[read read_billing_information], + Provider, + symbol: user.provider_id.upcase + can %i[read], Provider + can %i[read], ProviderPrefix, provider_id: user.provider_id + can %i[read], Client, provider_id: user.provider_id + can %i[read], ClientPrefix # , :client_id => user.client_id + can %i[read get_url read_landing_page_results], + Doi, + provider_id: user.provider_id + can %i[read], Doi + can %i[read], User + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? || activity.doi.provider_id == user.provider_id end elsif user.role_id == "client_admin" && user.client_id.present? - can [:read], Provider + can %i[read], Provider can %i[read update], Client, symbol: user.client_id.upcase - can [:read], ClientPrefix, client_id: user.client_id + can %i[read], ClientPrefix, client_id: user.client_id # if Flipper[:delete_doi].enabled?(user) # can [:manage], Doi, :client_id => user.client_id @@ -91,54 +111,77 @@ def initialize(user) # can [:read, :update], Doi, :client_id => user.client_id # end - can %i[read destroy update register_url validate undo get_url get_urls read_landing_page_results], Doi, client_id: user.client_id - can [:new, :create], Doi do |doi| - doi.client.prefixes.where(uid: doi.prefix).present? 
|| doi.type == "OtherDoi" - end - can [:read], Doi - can [:read], User - can [:read], Phrase - can [:read], Activity do |activity| + can %i[ + read + destroy + update + register_url + validate + undo + get_url + get_urls + read_landing_page_results + ], + Doi, + client_id: user.client_id + can %i[new create], Doi do |doi| + doi.client.prefixes.where(uid: doi.prefix).present? || + doi.type == "OtherDoi" + end + can %i[read], Doi + can %i[read], User + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? || activity.doi.client_id == user.client_id end elsif user.role_id == "client_user" && user.client_id.present? - can [:read], Provider - can [:read], Client, symbol: user.client_id.upcase - can [:read], ClientPrefix, client_id: user.client_id - can %i[read get_url read_landing_page_results], Doi, client_id: user.client_id - can [:read], Doi - can [:read], User - can [:read], Phrase - can [:read], Activity do |activity| + can %i[read], Provider + can %i[read], Client, symbol: user.client_id.upcase + can %i[read], ClientPrefix, client_id: user.client_id + can %i[read get_url read_landing_page_results], + Doi, + client_id: user.client_id + can %i[read], Doi + can %i[read], User + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? || activity.doi.client_id == user.client_id end elsif user.role_id == "user" - can [:read], Provider - can [:update], Provider, symbol: user.provider_id.upcase if user.provider_id.present? - can %i[read update], Client, symbol: user.client_id.upcase if user.client_id.present? - can [:read], Doi, client_id: user.client_id if user.client_id.present? + can %i[read], Provider + if user.provider_id.present? + can %i[update], Provider, symbol: user.provider_id.upcase + end + if user.client_id.present? + can %i[read update], Client, symbol: user.client_id.upcase + end + can %i[read], Doi, client_id: user.client_id if user.client_id.present? 
can %i[read get_url], Doi - can [:read], User, id: user.id - can [:read], Phrase - can [:read], Activity do |activity| + can %i[read], User, id: user.id + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? end elsif user.role_id == "temporary" - can [:read], Provider - can [:update], Provider, symbol: "ADMIN" if user.uid == "admin" - can [:update], Provider, symbol: user.provider_id.upcase if user.provider_id.present? - can %i[read update], Client, symbol: user.client_id.upcase if user.client_id.present? - can [:read], Doi, client_id: user.client_id if user.client_id.present? + can %i[read], Provider + can %i[update], Provider, symbol: "ADMIN" if user.uid == "admin" + if user.provider_id.present? + can %i[update], Provider, symbol: user.provider_id.upcase + end + if user.client_id.present? + can %i[read update], Client, symbol: user.client_id.upcase + end + can %i[read], Doi, client_id: user.client_id if user.client_id.present? can %i[read get_url], Doi - can [:read], User, id: user.id - can [:read], Phrase - can [:read], Activity do |activity| + can %i[read], User, id: user.id + can %i[read], Phrase + can %i[read], Activity do |activity| activity.doi.findable? end elsif user.role_id == "anonymous" can %i[read get_url], Doi - can [:read], Provider - can [:read], Activity do |activity| + can %i[read], Provider + can %i[read], Activity do |activity| activity.doi.findable? 
end end diff --git a/app/models/activity.rb b/app/models/activity.rb index 86370dda0..475d8d285 100644 --- a/app/models/activity.rb +++ b/app/models/activity.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Activity < Audited::Audit include Elasticsearch::Model @@ -24,16 +26,16 @@ def after_audit end mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :auditable_id, type: :keyword - indexes :uid, type: :keyword - indexes :auditable_type, type: :keyword - indexes :username, type: :keyword - indexes :action, type: :keyword - indexes :version, type: :keyword - indexes :request_uuid, type: :keyword - indexes :changes, type: :object - indexes :created, type: :date, ignore_malformed: true + indexes :id, type: :keyword + indexes :auditable_id, type: :keyword + indexes :uid, type: :keyword + indexes :auditable_type, type: :keyword + indexes :username, type: :keyword + indexes :action, type: :keyword + indexes :version, type: :keyword + indexes :request_uuid, type: :keyword + indexes :changes, type: :object + indexes :created, type: :date, ignore_malformed: true end def as_indexed_json(_options = {}) @@ -55,7 +57,15 @@ def as_indexed_json(_options = {}) end def self.query_fields - ["uid^10", "username^5", "action", "changes", "was_derived_from", "was_attributed_to", "was_generated_by"] + %w[ + uid^10 + username^5 + action + changes + was_derived_from + was_attributed_to + was_generated_by + ] end def self.query_aggregations @@ -78,40 +88,62 @@ def self.import_by_id(options = {}) return nil if options[:id].blank? id = options[:id].to_i - index = if Rails.env.test? - "activities-test" - elsif options[:index].present? - options[:index] - else - inactive_index - end + index = + if Rails.env.test? + "activities-test" + elsif options[:index].present? 
+ options[:index] + else + inactive_index + end errors = 0 count = 0 - Activity.where(id: id..(id + 499)).find_in_batches(batch_size: 500) do |activities| - response = Activity.__elasticsearch__.client.bulk \ - index: index, - type: Activity.document_type, - body: activities.map { |activity| { index: { _id: activity.id, data: activity.as_indexed_json } } } + Activity.where(id: id..(id + 499)).find_in_batches( + batch_size: 500, + ) do |activities| + response = + Activity.__elasticsearch__.client.bulk index: index, + type: Activity.document_type, + body: + activities.map { |activity| + { + index: { + _id: activity.id, + data: + activity. + as_indexed_json, + }, + } + } # log errors - errors += response["items"].map { |k, _v| k.values.first["error"] }.compact.length - response["items"].select { |k, _v| k.values.first["error"].present? }.each do |err| - Rails.logger.error "[Elasticsearch] " + err.inspect - end + errors += + response["items"].map { |k, _v| k.values.first["error"] }.compact.length + response["items"].select do |k, _v| + k.values.first["error"].present? + end.each { |err| Rails.logger.error "[Elasticsearch] " + err.inspect } count += activities.length end if errors > 1 - Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} activities with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[Elasticsearch] #{errors} errors importing #{ + count + } activities with IDs #{id} - #{id + 499}." elsif count.positive? - Rails.logger.info "[Elasticsearch] Imported #{count} activities with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{ + count + } activities with IDs #{id} - #{id + 499}." end count - rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.error "[Elasticsearch] Error #{e.message} importing activities with IDs #{id} - #{(id + 499)}." 
+ rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, + Faraday::ConnectionFailed, + ActiveRecord::LockWaitTimeout => e + Rails.logger.error "[Elasticsearch] Error #{ + e.message + } importing activities with IDs #{id} - #{id + 499}." count = 0 @@ -120,7 +152,9 @@ def self.import_by_id(options = {}) count += 1 end - Rails.logger.info "[Elasticsearch] Imported #{count} activities with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} activities with IDs #{ + id + } - #{id + 499}." count end @@ -132,7 +166,11 @@ def self.convert_affiliations(options = {}) # get every id between from_id and end_id (from_id..until_id).step(500).each do |id| ActivityConvertAffiliationByIdJob.perform_later(options.merge(id: id)) - Logger.info "Queued converting affiliations for activities with IDs starting with #{id}." unless Rails.env.test? + unless Rails.env.test? + Logger.info "Queued converting affiliations for activities with IDs starting with #{ + id + }." + end end (from_id..until_id).to_a.length @@ -147,43 +185,45 @@ def self.convert_affiliation_by_id(options = {}) Activity.where(id: id..(id + 499)).find_each do |activity| should_update = false audited_changes = activity.audited_changes - creators = Array.wrap(audited_changes["creators"]).map do |c| - # c is an array if there are changes - return [] if c.blank? + creators = + Array.wrap(audited_changes["creators"]).map do |c| + return if c.blank? + + c = c.last if c.is_a?(Array) + + if c["affiliation"].nil? + c["affiliation"] = [] + should_update = true + elsif c["affiliation"].is_a?(String) + c["affiliation"] = [{ "name" => c["affiliation"] }] + should_update = true + else + c["affiliation"].is_a?(Hash) + c["affiliation"] = Array.wrap(c["affiliation"]) + should_update = true + end + + c + end + contributors = + Array.wrap(audited_changes["contributors"]).map do |c| + return if c.blank? - c = c.last if c.is_a?(Array) + c = c.last if c.is_a?(Array) - if c["affiliation"].nil? 
- c["affiliation"] = [] - should_update = true - elsif c["affiliation"].is_a?(String) - c["affiliation"] = [{ "name" => c["affiliation"] }] - should_update = true - else c["affiliation"].is_a?(Hash) - c["affiliation"] = Array.wrap(c["affiliation"]) - should_update = true - end + if c["affiliation"].nil? + c["affiliation"] = [] + elsif c["affiliation"].is_a?(String) + c["affiliation"] = [{ "name" => c["affiliation"] }] + else + c["affiliation"].is_a?(Hash) + c["affiliation"] = Array.wrap(c["affiliation"]) + end - c - end - contributors = Array.wrap(audited_changes["contributors"]).map do |c| - # c is an array if there are changes - return [] if c.blank? - - c = c.last if c.is_a?(Array) - - if c["affiliation"].nil? - c["affiliation"] = [] - elsif c["affiliation"].is_a?(String) - c["affiliation"] = [{ "name" => c["affiliation"] }] - else c["affiliation"].is_a?(Hash) - c["affiliation"] = Array.wrap(c["affiliation"]) + should_update = true + c end - should_update = true - c - end - if should_update audited_changes["creators"] = creators audited_changes["contributors"] = contributors @@ -192,11 +232,21 @@ def self.convert_affiliation_by_id(options = {}) end end - Rails.logger.info "[Elasticsearch] Converted affiliations for #{count} activities with IDs #{id} - #{(id + 499)}." if count > 0 + if count > 0 + Rails.logger.info "[Elasticsearch] Converted affiliations for #{ + count + } activities with IDs #{id} - #{id + 499}." + end count - rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.info "[Elasticsearch] Error #{e.message} converting affiliations for DOIs with IDs #{id} - #{(id + 499)}." + rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, + Faraday::ConnectionFailed, + ActiveRecord::LockWaitTimeout => e + Rails.logger.info "[Elasticsearch] Error #{ + e.message + } converting affiliations for DOIs with IDs #{id} - #{ + id + 499 + }." 
end def uid @@ -204,12 +254,21 @@ def uid end def url - Rails.env.production? ? "https://api.datacite.org" : "https://api.test.datacite.org" + if Rails.env.production? + "https://api.datacite.org" + else + "https://api.test.datacite.org" + end end def was_derived_from if auditable_type == "Doi" - handle_url = Rails.env.production? ? "https://doi.org/" : "https://handle.test.datacite.org/" + handle_url = + if Rails.env.production? + "https://doi.org/" + else + "https://handle.test.datacite.org/" + end handle_url + uid elsif auditable_type == "Provider" url + "/providers/" + uid @@ -220,7 +279,11 @@ def was_derived_from def was_attributed_to if username.present? - username.include?(".") ? url + "/repositories/" + username : url + "/providers/" + username + if username.include?(".") + url + "/repositories/" + username + else + url + "/providers/" + username + end end end diff --git a/app/models/application_record.rb b/app/models/application_record.rb index 10a4cba84..71fbba5b3 100644 --- a/app/models/application_record.rb +++ b/app/models/application_record.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ApplicationRecord < ActiveRecord::Base self.abstract_class = true end diff --git a/app/models/client.rb b/app/models/client.rb index 1c6fdc91c..959f687e9 100644 --- a/app/models/client.rb +++ b/app/models/client.rb @@ -1,5 +1,17 @@ +# frozen_string_literal: true + class Client < ApplicationRecord - audited except: %i[globus_uuid salesforce_id password updated comments experiments version doi_quota_allowed doi_quota_used] + audited except: %i[ + globus_uuid + salesforce_id + password + updated + comments + experiments + version + doi_quota_allowed + doi_quota_used + ] # include helper module for caching infrequently changing resources include Cacheable @@ -33,16 +45,28 @@ class Client < ApplicationRecord delegate :symbol, to: :provider, prefix: true delegate :consortium_id, to: :provider, allow_nil: true - attr_accessor :password_input + attr_accessor 
:password_input, :target_id validates_presence_of :symbol, :name, :system_email - validates_uniqueness_of :symbol, message: "This Client ID has already been taken" - validates_format_of :symbol, with: /\A([A-Z]+\.[A-Z0-9]+(-[A-Z0-9]+)?)\Z/, message: "should only contain capital letters, numbers, and at most one hyphen" + validates_uniqueness_of :symbol, + message: "This Client ID has already been taken" + validates_format_of :symbol, + with: /\A([A-Z]+\.[A-Z0-9]+(-[A-Z0-9]+)?)\Z/, + message: + "should only contain capital letters, numbers, and at most one hyphen" validates_length_of :symbol, minimum: 5, maximum: 18 - validates_format_of :system_email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i - validates_format_of :salesforce_id, with: /[a-zA-Z0-9]{18}/, message: "wrong format for salesforce id", if: :salesforce_id? - validates_inclusion_of :role_name, in: %w(ROLE_DATACENTRE), message: "Role %s is not included in the list" - validates_inclusion_of :client_type, in: %w(repository periodical), message: "Client type %s is not included in the list" + validates_format_of :system_email, + with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i + validates_format_of :salesforce_id, + with: /[a-zA-Z0-9]{18}/, + message: "wrong format for salesforce id", + if: :salesforce_id? + validates_inclusion_of :role_name, + in: %w[ROLE_DATACENTRE], + message: "Role %s is not included in the list" + validates_inclusion_of :client_type, + in: %w[repository periodical], + message: "Client type %s is not included in the list" validates_associated :provider validate :check_id, on: :create validate :freeze_symbol, on: :update @@ -63,8 +87,6 @@ class Client < ApplicationRecord before_create { self.created = Time.zone.now.utc.iso8601 } before_save { self.updated = Time.zone.now.utc.iso8601 } - attr_accessor :target_id - # use different index for testing if Rails.env.test? 
index_name "clients-test" @@ -77,140 +99,232 @@ class Client < ApplicationRecord settings index: { analysis: { analyzer: { - string_lowercase: { tokenizer: "keyword", filter: %w(lowercase ascii_folding) }, + string_lowercase: { + tokenizer: "keyword", filter: %w[lowercase ascii_folding] + }, }, normalizer: { - keyword_lowercase: { type: "custom", filter: %w(lowercase) }, + keyword_lowercase: { type: "custom", filter: %w[lowercase] }, }, filter: { - ascii_folding: { type: "asciifolding", preserve_original: true }, + ascii_folding: { + type: "asciifolding", preserve_original: true + }, }, }, } do mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :uid, type: :keyword, normalizer: "keyword_lowercase" - indexes :symbol, type: :keyword - indexes :provider_id, type: :keyword + indexes :id, type: :keyword + indexes :uid, type: :keyword, normalizer: "keyword_lowercase" + indexes :symbol, type: :keyword + indexes :provider_id, type: :keyword indexes :provider_id_and_name, type: :keyword indexes :consortium_id, type: :keyword - indexes :re3data_id, type: :keyword - indexes :opendoar_id, type: :integer + indexes :re3data_id, type: :keyword + indexes :opendoar_id, type: :integer indexes :salesforce_id, type: :keyword - indexes :globus_uuid, type: :keyword - indexes :issn, type: :object, properties: { - issnl: { type: :keyword }, - electronic: { type: :keyword }, - print: { type: :keyword }, - } - indexes :prefix_ids, type: :keyword - indexes :name, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", analyzer: "string_lowercase", "fielddata": true } } - indexes :alternate_name, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", analyzer: "string_lowercase", "fielddata": true } } - indexes :description, type: :text - indexes :system_email, type: :text, fields: { keyword: { type: "keyword" } } - indexes :service_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - 
family_name: { type: :text }, - } - indexes :certificate, type: :keyword - indexes :language, type: :keyword + indexes :globus_uuid, type: :keyword + indexes :issn, + type: :object, + properties: { + issnl: { type: :keyword }, + electronic: { type: :keyword }, + print: { type: :keyword }, + } + indexes :prefix_ids, type: :keyword + indexes :name, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", analyzer: "string_lowercase", "fielddata": true + }, + } + indexes :alternate_name, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", analyzer: "string_lowercase", "fielddata": true + }, + } + indexes :description, type: :text + indexes :system_email, + type: :text, fields: { keyword: { type: "keyword" } } + indexes :service_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :certificate, type: :keyword + indexes :language, type: :keyword indexes :repository_type, type: :keyword - indexes :version, type: :integer - indexes :is_active, type: :keyword - indexes :domains, type: :text - indexes :year, type: :integer - indexes :url, type: :text, fields: { keyword: { type: "keyword" } } - indexes :software, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", analyzer: "string_lowercase", "fielddata": true } } - indexes :cache_key, type: :keyword - indexes :client_type, type: :keyword - indexes :created, type: :date - indexes :updated, type: :date - indexes :deleted_at, type: :date + indexes :version, type: :integer + indexes :is_active, type: :keyword + indexes :domains, type: :text + indexes :year, type: :integer + indexes :url, type: :text, fields: { keyword: { type: "keyword" } } + indexes :software, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", analyzer: "string_lowercase", "fielddata": true + }, + } + indexes :cache_key, type: :keyword + indexes 
:client_type, type: :keyword + indexes :created, type: :date + indexes :updated, type: :date + indexes :deleted_at, type: :date indexes :cumulative_years, type: :integer, index: "false" # include parent objects - indexes :provider, type: :object, properties: { - id: { type: :keyword }, - uid: { type: :keyword }, - symbol: { type: :keyword }, - globus_uuid: { type: :keyword }, - client_ids: { type: :keyword }, - prefix_ids: { type: :keyword }, - name: { type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } }, - display_name: { type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } }, - system_email: { type: :text, fields: { keyword: { type: "keyword" } } }, - group_email: { type: :text, fields: { keyword: { type: "keyword" } } }, - version: { type: :integer }, - is_active: { type: :keyword }, - year: { type: :integer }, - description: { type: :text }, - website: { type: :text, fields: { keyword: { type: "keyword" } } }, - logo_url: { type: :text }, - region: { type: :keyword }, - focus_area: { type: :keyword }, - organization_type: { type: :keyword }, - member_type: { type: :keyword }, - consortium_id: { type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } }, - consortium_organization_ids: { type: :keyword }, - country_code: { type: :keyword }, - role_name: { type: :keyword }, - cache_key: { type: :keyword }, - joined: { type: :date }, - twitter_handle: { type: :keyword }, - ror_id: { type: :keyword }, - salesforce_id: { type: :keyword }, - billing_information: { type: :object, properties: { - postCode: { type: :keyword }, - state: { type: :text }, - organization: { type: :text }, - department: { type: :text }, - city: { type: :text }, - country: { type: :text }, - address: { type: :text }, - } }, - technical_contact: { type: :object, properties: { 
- email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - secondary_technical_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - billing_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - secondary_billing_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - service_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - secondary_service_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - voting_contact: { type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } }, - created: { type: :date }, - updated: { type: :date }, - deleted_at: { type: :date }, - cumulative_years: { type: :integer, index: "false" }, - consortium: { type: :object }, - consortium_organizations: { type: :object }, - } + indexes :provider, + type: :object, + properties: { + id: { type: :keyword }, + uid: { type: :keyword }, + symbol: { type: :keyword }, + globus_uuid: { type: :keyword }, + client_ids: { type: :keyword }, + prefix_ids: { type: :keyword }, + name: { + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + }, + }, + display_name: { + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + }, + }, + system_email: { + type: :text, fields: { keyword: { type: "keyword" } } + }, + group_email: { + type: :text, fields: { keyword: { type: "keyword" } } + }, + version: { type: :integer }, + 
is_active: { type: :keyword }, + year: { type: :integer }, + description: { type: :text }, + website: { + type: :text, fields: { keyword: { type: "keyword" } } + }, + logo_url: { type: :text }, + region: { type: :keyword }, + focus_area: { type: :keyword }, + organization_type: { type: :keyword }, + member_type: { type: :keyword }, + consortium_id: { + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + }, + }, + consortium_organization_ids: { type: :keyword }, + country_code: { type: :keyword }, + role_name: { type: :keyword }, + cache_key: { type: :keyword }, + joined: { type: :date }, + twitter_handle: { type: :keyword }, + ror_id: { type: :keyword }, + salesforce_id: { type: :keyword }, + billing_information: { + type: :object, + properties: { + postCode: { type: :keyword }, + state: { type: :text }, + organization: { type: :text }, + department: { type: :text }, + city: { type: :text }, + country: { type: :text }, + address: { type: :text }, + }, + }, + technical_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + secondary_technical_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + billing_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + secondary_billing_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + service_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + secondary_service_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: 
:text }, + }, + }, + voting_contact: { + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + }, + }, + created: { type: :date }, + updated: { type: :date }, + deleted_at: { type: :date }, + cumulative_years: { type: :integer, index: "false" }, + consortium: { type: :object }, + consortium_organizations: { type: :object }, + } end end @@ -248,24 +362,65 @@ def as_indexed_json(options = {}) "updated" => updated, "deleted_at" => deleted_at, "cumulative_years" => cumulative_years, - "provider" => options[:exclude_associations] ? nil : provider.as_indexed_json(exclude_associations: true), + "provider" => + if options[:exclude_associations] + nil + else + provider.as_indexed_json(exclude_associations: true) + end, } end def self.query_fields - ["uid^10", "symbol^10", "name^5", "description^5", "system_email^5", "url", "software^3", "repository.subjects.text^3", "repository.certificates.text^3", "_all"] + %w[ + uid^10 + symbol^10 + name^5 + description^5 + system_email^5 + url + software^3 + repository.subjects.text^3 + repository.certificates.text^3 + _all + ] end def self.query_aggregations { - years: { date_histogram: { field: "created", interval: "year", format: "year", order: { _key: "desc" }, min_doc_count: 1 }, - aggs: { bucket_truncate: { bucket_sort: { size: 10 } } } }, - cumulative_years: { terms: { field: "cumulative_years", size: 20, min_doc_count: 1, order: { _count: "asc" } } }, - providers: { terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 } }, - software: { terms: { field: "software.keyword", size: 10, min_doc_count: 1 } }, - client_types: { terms: { field: "client_type", size: 10, min_doc_count: 1 } }, - repository_types: { terms: { field: "repository_type", size: 10, min_doc_count: 1 } }, - certificates: { terms: { field: "certificate", size: 10, min_doc_count: 1 } }, + years: { + date_histogram: { + field: "created", + interval: "year", + format: "year", + order: { 
_key: "desc" }, + min_doc_count: 1, + }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, + }, + cumulative_years: { + terms: { + field: "cumulative_years", + size: 20, + min_doc_count: 1, + order: { _count: "asc" }, + }, + }, + providers: { + terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 }, + }, + software: { + terms: { field: "software.keyword", size: 10, min_doc_count: 1 }, + }, + client_types: { + terms: { field: "client_type", size: 10, min_doc_count: 1 }, + }, + repository_types: { + terms: { field: "repository_type", size: 10, min_doc_count: 1 }, + }, + certificates: { + terms: { field: "certificate", size: 10, min_doc_count: 1 }, + }, } end @@ -342,8 +497,13 @@ def transfer(provider_target_id: nil) return nil end - target_provider = Provider.where("role_name IN (?)", %w(ROLE_ALLOCATOR ROLE_CONSORTIUM_ORGANIZATION)). - where(symbol: provider_target_id).first + target_provider = + Provider.where( + "role_name IN (?)", + %w[ROLE_ALLOCATOR ROLE_CONSORTIUM_ORGANIZATION], + ). + where(symbol: provider_target_id). + first if target_provider.blank? Rails.logger.error "[Transfer] Provider doesn't exist." 
@@ -357,31 +517,49 @@ def transfer(provider_target_id: nil) transfer_prefixes(provider_target_id: target_provider.symbol) # Update DOIs - TransferClientJob.perform_later(self, provider_target_id: provider_target_id) + TransferClientJob.perform_later( + self, + provider_target_id: provider_target_id, + ) end # use keyword arguments consistently def transfer_prefixes(provider_target_id: nil) # These prefixes are used by multiple clients - prefixes_to_keep = ["10.4124", "10.4225", "10.4226", "10.4227"] + prefixes_to_keep = %w[10.4124 10.4225 10.4226 10.4227] # delete all associated prefixes - associated_prefixes = prefixes.reject { |prefix| prefixes_to_keep.include?(prefix.uid) } + associated_prefixes = + prefixes.reject { |prefix| prefixes_to_keep.include?(prefix.uid) } prefix_ids = associated_prefixes.pluck(:id) prefixes_names = associated_prefixes.pluck(:uid) if prefix_ids.present? - response = ProviderPrefix.where("prefix_id IN (?)", prefix_ids).destroy_all - Rails.logger.info "[Transfer] #{response.count} provider prefixes deleted." + response = + ProviderPrefix.where("prefix_id IN (?)", prefix_ids).destroy_all + Rails.logger.info "[Transfer] #{ + response.count + } provider prefixes deleted." end # Assign prefix(es) to provider and client prefixes_names.each do |prefix| - provider_prefix = ProviderPrefix.create(provider_id: provider_target_id, prefix_id: prefix) - Rails.logger.info "[Transfer] Provider prefix for provider #{provider_target_id} and prefix #{prefix} created." - - ClientPrefix.create(client_id: symbol, provider_prefix_id: provider_prefix.uid, prefix_id: prefix) - Rails.logger.info "Client prefix for client #{symbol} and prefix #{prefix} created." + provider_prefix = + ProviderPrefix.create( + provider_id: provider_target_id, prefix_id: prefix, + ) + Rails.logger.info "[Transfer] Provider prefix for provider #{ + provider_target_id + } and prefix #{prefix} created." 
+ + ClientPrefix.create( + client_id: symbol, + provider_prefix_id: provider_prefix.uid, + prefix_id: prefix, + ) + Rails.logger.info "Client prefix for client #{symbol} and prefix #{ + prefix + } created." end end @@ -452,7 +630,7 @@ def to_jsonapi def self.export_doi_counts(query: nil) # Loop through all clients - page = { size: 1000, number: 1 } + page = { size: 1_000, number: 1 } response = self.query(query, page: page) clients = response.results.to_a @@ -462,21 +640,23 @@ def self.export_doi_counts(query: nil) # keep going for all pages page_num = 2 while page_num <= total_pages - page = { size: 1000, number: page_num } + page = { size: 1_000, number: page_num } response = self.query(query, page: page) clients = clients + response.results.to_a page_num += 1 end # Get doi counts via DOIs query and combine next to clients. - response = DataciteDoi.query(nil, page: { size: 0, number: 1 }, totals_agg: "client_export") + response = + DataciteDoi.query( + nil, + page: { size: 0, number: 1 }, totals_agg: "client_export", + ) client_totals = {} totals_buckets = response.aggregations.clients_totals.buckets totals_buckets.each do |totals| - client_totals[totals["key"]] = { - "count" => totals["doc_count"], - } + client_totals[totals["key"]] = { "count" => totals["doc_count"] } end headers = [ @@ -488,31 +668,34 @@ def self.export_doi_counts(query: nil) ] dois_by_client = DataciteDoi.group(:datacentre).count - rows = clients.reduce([]) do |sum, client| - db_total = dois_by_client.dig(client.id).to_i - es_total = client_totals[client.uid] ? client_totals[client.uid]["count"] : 0 - if (db_total - es_total) > 0 - # Limit for salesforce default of max 80 chars - name = +client.name.truncate(80) - # Clean the name to remove quotes, which can break csv parsers - name.gsub! /["']/, "" - - row = { - accountName: name, - fabricaAccountId: client.symbol, - parentFabricaAccountId: client.provider.present? ? 
client.provider.symbol : nil, - doisCountTotal: db_total, - doisMissing: db_total - es_total, - }.values - - puts CSV.generate_line(row) - - sum << CSV.generate_line(row) + rows = + clients.reduce([]) do |sum, client| + db_total = dois_by_client.dig(client.id).to_i + es_total = + client_totals[client.uid] ? client_totals[client.uid]["count"] : 0 + if (db_total - es_total) > 0 + # Limit for salesforce default of max 80 chars + name = +client.name.truncate(80) + # Clean the name to remove quotes, which can break csv parsers + name.gsub!(/["']/, "") + + row = { + accountName: name, + fabricaAccountId: client.symbol, + parentFabricaAccountId: + client.provider.present? ? client.provider.symbol : nil, + doisCountTotal: db_total, + doisMissing: db_total - es_total, + }.values + + puts CSV.generate_line(row) + + sum << CSV.generate_line(row) + end + + sum end - sum - end - csv = CSV::Table.new(rows, headers: headers) logger.warn "Found #{csv.count} repositories with missing DOIs." @@ -521,71 +704,117 @@ def self.export_doi_counts(query: nil) end protected - - def check_issn - Array.wrap(issn).each do |i| - if !i.is_a?(Hash) - errors.add(:issn, "ISSN should be an object and not a string.") - elsif i["issnl"].present? - errors.add(:issn, "ISSN-L #{i['issnl']} is in the wrong format.") unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["issnl"]) - end - if i["electronic"].present? - errors.add(:issn, "ISSN (electronic) #{i['electronic']} is in the wrong format.") unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["electronic"]) - end - if i["print"].present? - errors.add(:issn, "ISSN (print) #{i['print']} is in the wrong format.") unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["print"]) + def check_issn + Array.wrap(issn).each do |i| + if !i.is_a?(Hash) + errors.add(:issn, "ISSN should be an object and not a string.") + elsif i["issnl"].present? 
+ unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["issnl"]) + errors.add(:issn, "ISSN-L #{i['issnl']} is in the wrong format.") + end + end + if i["electronic"].present? + unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["electronic"]) + errors.add( + :issn, + "ISSN (electronic) #{i['electronic']} is in the wrong format.", + ) + end + end + if i["print"].present? + unless /\A\d{4}(-)?\d{3}[0-9X]+\z/.match?(i["print"]) + errors.add( + :issn, + "ISSN (print) #{i['print']} is in the wrong format.", + ) + end + end end end - end - def check_language - Array.wrap(language).each do |l| - errors.add(:issn, "Language can't be empty.") if l.blank? + def check_language + Array.wrap(language).each do |l| + errors.add(:issn, "Language can't be empty.") if l.blank? + end end - end - def check_certificate - Array.wrap(certificate).each do |c| - errors.add(:certificate, "Certificate #{c} is not included in the list of supported certificates.") unless ["CoreTrustSeal", "DIN 31644", "DINI", "DSA", "RatSWD", "WDS", "CLARIN"].include?(c) + def check_certificate + Array.wrap(certificate).each do |c| + unless [ + "CoreTrustSeal", + "DIN 31644", + "DINI", + "DSA", + "RatSWD", + "WDS", + "CLARIN", + ].include?(c) + errors.add( + :certificate, + "Certificate #{ + c + } is not included in the list of supported certificates.", + ) + end + end end - end - def check_repository_type - Array.wrap(repository_type).each do |r| - errors.add(:repository_type, "Repository type #{r} is not included in the list of supported repository types.") unless %w(disciplinary governmental institutional multidisciplinary project-related other).include?(r) + def check_repository_type + Array.wrap(repository_type).each do |r| + unless %w[ + disciplinary + governmental + institutional + multidisciplinary + project-related + other + ].include?(r) + errors.add( + :repository_type, + "Repository type #{ + r + } is not included in the list of supported repository types.", + ) + end + end end - end - def uuid_format - 
errors.add(:globus_uuid, "#{globus_uuid} is not a valid UUID") unless UUID.validate(globus_uuid) - end + def uuid_format + unless UUID.validate(globus_uuid) + errors.add(:globus_uuid, "#{globus_uuid} is not a valid UUID") + end + end - def freeze_symbol - errors.add(:symbol, "cannot be changed") if symbol_changed? - end + def freeze_symbol + errors.add(:symbol, "cannot be changed") if symbol_changed? + end - def check_id - if symbol && symbol.split(".").first != provider.symbol - errors.add(:symbol, ", Your Client ID must include the name of your provider. Separated by a dot '.' ") + def check_id + if symbol && symbol.split(".").first != provider.symbol + errors.add( + :symbol, + ", Your Client ID must include the name of your provider. Separated by a dot '.' ", + ) + end end - end - def user_url - ENV["VOLPINO_URL"] + "/users?client-id=" + symbol.downcase - end + def user_url + ENV["VOLPINO_URL"] + "/users?client-id=" + symbol.downcase + end private - - def set_defaults - self.domains = "*" if domains.blank? - self.client_type = "repository" if client_type.blank? - self.issn = {} if issn.blank? || client_type == "repository" - self.certificate = [] if certificate.blank? || client_type == "periodical" - self.repository_type = [] if repository_type.blank? || client_type == "periodical" - self.is_active = is_active ? "\x01" : "\x00" - self.version = version.present? ? version + 1 : 0 - self.role_name = "ROLE_DATACENTRE" if role_name.blank? - self.doi_quota_used = 0 unless doi_quota_used.to_i > 0 - self.doi_quota_allowed = -1 unless doi_quota_allowed.to_i > 0 - end + def set_defaults + self.domains = "*" if domains.blank? + self.client_type = "repository" if client_type.blank? + self.issn = {} if issn.blank? || client_type == "repository" + self.certificate = [] if certificate.blank? || client_type == "periodical" + if repository_type.blank? || client_type == "periodical" + self.repository_type = [] + end + self.is_active = is_active ? 
"\x01" : "\x00" + self.version = version.present? ? version + 1 : 0 + self.role_name = "ROLE_DATACENTRE" if role_name.blank? + self.doi_quota_used = 0 unless doi_quota_used.to_i > 0 + self.doi_quota_allowed = -1 unless doi_quota_allowed.to_i > 0 + end end diff --git a/app/models/client_prefix.rb b/app/models/client_prefix.rb index 2e9c30b38..f51c619c0 100644 --- a/app/models/client_prefix.rb +++ b/app/models/client_prefix.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ClientPrefix < ApplicationRecord # include helper module for caching infrequently changing resources include Cacheable @@ -25,27 +27,29 @@ class ClientPrefix < ApplicationRecord end mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :uid, type: :keyword - indexes :provider_id, type: :keyword - indexes :client_id, type: :keyword - indexes :prefix_id, type: :keyword + indexes :id, type: :keyword + indexes :uid, type: :keyword + indexes :provider_id, type: :keyword + indexes :client_id, type: :keyword + indexes :prefix_id, type: :keyword indexes :provider_prefix_id, type: :keyword - indexes :created_at, type: :date - indexes :updated_at, type: :date + indexes :created_at, type: :date + indexes :updated_at, type: :date # index associations - indexes :client, type: :object - indexes :provider, type: :object - indexes :prefix, type: :object, properties: { - id: { type: :keyword }, - uid: { type: :keyword }, - provider_ids: { type: :keyword }, - client_ids: { type: :keyword }, - state: { type: :keyword }, - prefix: { type: :text }, - created_at: { type: :date }, - } + indexes :client, type: :object + indexes :provider, type: :object + indexes :prefix, + type: :object, + properties: { + id: { type: :keyword }, + uid: { type: :keyword }, + provider_ids: { type: :keyword }, + client_ids: { type: :keyword }, + state: { type: :keyword }, + prefix: { type: :text }, + created_at: { type: :date }, + } indexes :provider_prefix, type: :object end @@ -59,19 +63,51 @@ def 
as_indexed_json(options = {}) "provider_prefix_id" => provider_prefix_id, "created_at" => created_at, "updated_at" => updated_at, - "client" => options[:exclude_associations] ? nil : client.try(:as_indexed_json, exclude_associations: true), - "provider" => options[:exclude_associations] ? nil : provider.try(:as_indexed_json, exclude_associations: true), - "prefix" => options[:exclude_associations] ? nil : prefix.try(:as_indexed_json, exclude_associations: true), - "provider_prefix" => options[:exclude_associations] ? nil : provider_prefix.try(:as_indexed_json, exclude_associations: true), + "client" => + if options[:exclude_associations] + nil + else + client.try(:as_indexed_json, exclude_associations: true) + end, + "provider" => + if options[:exclude_associations] + nil + else + provider.try(:as_indexed_json, exclude_associations: true) + end, + "prefix" => + if options[:exclude_associations] + nil + else + prefix.try(:as_indexed_json, exclude_associations: true) + end, + "provider_prefix" => + if options[:exclude_associations] + nil + else + provider_prefix.try(:as_indexed_json, exclude_associations: true) + end, } end def self.query_aggregations { - years: { date_histogram: { field: "created_at", interval: "year", format: "year", order: { _key: "desc" }, min_doc_count: 1 }, - aggs: { bucket_truncate: { bucket_sort: { size: 10 } } } }, - providers: { terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 } }, - clients: { terms: { field: "client_id_and_name", size: 10, min_doc_count: 1 } }, + years: { + date_histogram: { + field: "created_at", + interval: "year", + format: "year", + order: { _key: "desc" }, + min_doc_count: 1, + }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, + }, + providers: { + terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 }, + }, + clients: { + terms: { field: "client_id_and_name", size: 10, min_doc_count: 1 }, + }, } end @@ -131,9 +167,8 @@ def provider_prefix_id=(value) end private - - # uuid 
for public id - def set_uid - self.uid = SecureRandom.uuid - end + # uuid for public id + def set_uid + self.uid = SecureRandom.uuid + end end diff --git a/app/models/concerns/authenticable.rb b/app/models/concerns/authenticable.rb index 0f6d4cfe1..b444a938e 100644 --- a/app/models/concerns/authenticable.rb +++ b/app/models/concerns/authenticable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Authenticable extend ActiveSupport::Concern @@ -10,7 +12,8 @@ def encode_token(payload) return nil if payload.blank? # replace newline characters with actual newlines - private_key = OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub('\n', "\n")) + private_key = + OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub('\n', "\n")) JWT.encode(payload, private_key, "RS256") rescue OpenSSL::PKey::RSAError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -23,7 +26,13 @@ def encode_alb_token(payload) return nil if payload.blank? || !Rails.env.test? # replace newline characters with actual newlines - private_key = OpenSSL::PKey.read(File.read(Rails.root.join("spec", "fixtures", "certs", "ec256-private.pem").to_s)) + private_key = + OpenSSL::PKey.read( + File.read( + Rails.root.join("spec", "fixtures", "certs", "ec256-private.pem"). + to_s, + ), + ) JWT.encode(payload, private_key, "ES256") rescue OpenSSL::PKey::ECError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -36,7 +45,13 @@ def encode_globus_token(payload) return nil if payload.blank? || !Rails.env.test? # replace newline characters with actual newlines - private_key = OpenSSL::PKey.read(File.read(Rails.root.join("spec", "fixtures", "certs", "ec512-private.pem").to_s)) + private_key = + OpenSSL::PKey.read( + File.read( + Rails.root.join("spec", "fixtures", "certs", "ec512-private.pem"). 
+ to_s, + ), + ) JWT.encode(payload, private_key, "RS512") rescue OpenSSL::PKey::ECError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -56,18 +71,24 @@ def decode_token(token) case header["alg"] when "RS256" # DataCite JWT - public_key = OpenSSL::PKey::RSA.new(ENV["JWT_PUBLIC_KEY"].to_s.gsub('\n', "\n")) + public_key = + OpenSSL::PKey::RSA.new(ENV["JWT_PUBLIC_KEY"].to_s.gsub('\n', "\n")) payload = (JWT.decode token, public_key, true, algorithm: "RS256").first when "RS512" # Globus JWT - public_key = OpenSSL::PKey::RSA.new(cached_globus_public_key.fetch("n", nil).to_s.gsub('\n', "\n")) + public_key = + OpenSSL::PKey::RSA.new( + cached_globus_public_key.fetch("n", nil).to_s.gsub('\n', "\n"), + ) payload = (JWT.decode token, public_key, true, algorithm: "RS512").first else raise JWT::DecodeError, "Algorithm #{header['alg']} is not supported." end # check whether token has expired - fail JWT::ExpiredSignature, "The token has expired." unless Time.now.to_i < payload["exp"].to_i + unless Time.now.to_i < payload["exp"].to_i + fail JWT::ExpiredSignature, "The token has expired." + end payload rescue JWT::ExpiredSignature => e @@ -78,14 +99,21 @@ def decode_token(token) { errors: "The token could not be decoded." } rescue OpenSSL::PKey::RSAError => e public_key = ENV["JWT_PUBLIC_KEY"].presence || "nil" - Rails.logger.error "OpenSSL::PKey::RSAError: " + e.message + " for " + public_key + Rails.logger.error "OpenSSL::PKey::RSAError: " + e.message + " for " + + public_key { errors: "An error occured." } end # decode JWT token from AWS ALB using SHA-256 hash algorithm def decode_alb_token(token) if Rails.env.test? - public_key = OpenSSL::PKey.read(File.read(Rails.root.join("spec", "fixtures", "certs", "ec256-public.pem").to_s)) + public_key = + OpenSSL::PKey.read( + File.read( + Rails.root.join("spec", "fixtures", "certs", "ec256-public.pem"). 
+ to_s, + ), + ) else header = JSON.parse(Base64.urlsafe_decode64(token.split(".").first)) kid = header["kid"] @@ -97,7 +125,9 @@ def decode_alb_token(token) fail NoMethodError, "Payload is not a hash" unless payload.is_a?(Hash) # check whether token has expired - fail JWT::ExpiredSignature, "The token has expired." unless Time.now.to_i < payload["exp"].to_i + unless Time.now.to_i < payload["exp"].to_i + fail JWT::ExpiredSignature, "The token has expired." + end payload rescue NoMethodError => e @@ -125,13 +155,17 @@ def encode_auth_param(username: nil, password: nil) def decode_auth_param(username: nil, password: nil) return {} unless username.present? && password.present? - user = if username.include?(".") - Client.where(symbol: username.upcase).first - else - Provider.unscoped.where(symbol: username.upcase).first - end + user = + if username.include?(".") + Client.where(symbol: username.upcase).first + else + Provider.unscoped.where(symbol: username.upcase).first + end - return {} unless user && secure_compare(user.password, encrypt_password_sha256(password)) + unless user && + secure_compare(user.password, encrypt_password_sha256(password)) + return {} + end uid = username.downcase @@ -187,10 +221,22 @@ def not_allowed_by_doi_and_user(doi: nil, user: nil) return true if doi.blank? return false if doi.aasm_state == "findable" return true if user.blank? - return false if %w(staff_admin staff_user).include?(user.role_id) - return false if %w(consortium_admin).include?(user.role_id) && user.provider_id.present? && user.provider_id.upcase == doi.provider.consortium_id - return false if %w(provider_admin provider_user).include?(user.role_id) && user.provider_id.present? && user.provider_id == doi.provider_id - return false if %w(client_admin client_user user temporary).include?(user.role_id) && user.client_id.present? 
&& user.client_id == doi.client_id + return false if %w[staff_admin staff_user].include?(user.role_id) + if %w[consortium_admin].include?(user.role_id) && + user.provider_id.present? && + user.provider_id.upcase == doi.provider.consortium_id + return false + end + if %w[provider_admin provider_user].include?(user.role_id) && + user.provider_id.present? && + user.provider_id == doi.provider_id + return false + end + if %w[client_admin client_user user temporary].include?(user.role_id) && + user.client_id.present? && + user.client_id == doi.client_id + return false + end true end @@ -202,7 +248,8 @@ def encode_token(payload) return nil if payload.blank? # replace newline characters with actual newlines - private_key = OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub('\n', "\n")) + private_key = + OpenSSL::PKey::RSA.new(ENV["JWT_PRIVATE_KEY"].to_s.gsub('\n', "\n")) JWT.encode(payload, private_key, "RS256") rescue OpenSSL::PKey::RSAError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -215,7 +262,13 @@ def encode_token(payload) def encode_alb_token(payload) return nil if payload.blank? || !Rails.env.test? - private_key = OpenSSL::PKey.read(File.read(Rails.root.join("spec", "fixtures", "certs", "ec256-private.pem").to_s)) + private_key = + OpenSSL::PKey.read( + File.read( + Rails.root.join("spec", "fixtures", "certs", "ec256-private.pem"). + to_s, + ), + ) JWT.encode(payload, private_key, "ES256") rescue OpenSSL::PKey::ECError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -228,7 +281,13 @@ def encode_alb_token(payload) def encode_globus_token(payload) return nil if payload.blank? || !Rails.env.test? - private_key = OpenSSL::PKey.read(File.read(Rails.root.join("spec", "fixtures", "certs", "ec512-private.pem").to_s)) + private_key = + OpenSSL::PKey.read( + File.read( + Rails.root.join("spec", "fixtures", "certs", "ec512-private.pem"). 
+ to_s, + ), + ) JWT.encode(payload, private_key, "RS512") rescue OpenSSL::PKey::ECError => e Rails.logger.error e.inspect + " for " + payload.inspect @@ -264,7 +323,8 @@ def generate_token(attributes = {}) end def generate_alb_token(attributes = {}) - preferred_username = attributes.fetch(:preferred_username, "0000-0001-5489-3594@orcid.org") + preferred_username = + attributes.fetch(:preferred_username, "0000-0001-5489-3594@orcid.org") payload = { uid: preferred_username[0..18], diff --git a/app/models/concerns/authorable.rb b/app/models/concerns/authorable.rb index 038c4e5ed..e48ad47b1 100644 --- a/app/models/concerns/authorable.rb +++ b/app/models/concerns/authorable.rb @@ -1,12 +1,12 @@ +# frozen_string_literal: true + module Authorable extend ActiveSupport::Concern require "namae" included do - IDENTIFIER_SCHEME_URIS = { - "ORCID" => "https://orcid.org/", - }.freeze + IDENTIFIER_SCHEME_URIS = { "ORCID" => "https://orcid.org/" }.freeze # parse author string into CSL format # only assume personal name when using sort-order: "Turing, Alan" @@ -21,19 +21,19 @@ def get_one_author(author, _options = {}) else name = names.first - { "family" => name.family, - "given" => name.given }.compact + { "family" => name.family, "given" => name.given }.compact end end def cleanup_author(author) # detect pattern "Smith J.", but not "Smith, John K." - author = author.gsub(/[[:space:]]([A-Z]\.)?(-?[A-Z]\.)$/, ', \1\2') unless author.include?(",") + unless author.include?(",") + author = author.gsub(/[[:space:]]([A-Z]\.)?(-?[A-Z]\.)$/, ', \1\2') + end # titleize strings # remove non-standard space characters - author.my_titleize. 
- gsub(/[[:space:]]/, " ") + author.my_titleize.gsub(/[[:space:]]/, " ") end def is_personal_name?(author) @@ -63,28 +63,34 @@ def get_one_hashed_author(author) # parse nameIdentifier from DataCite def get_name_identifiers(author) - name_identifiers = Array.wrap(author.fetch("nameIdentifier", nil)).reduce([]) do |sum, n| - n = { "__content__" => n } if n.is_a?(String) - - # fetch scheme_uri, default to ORCID - scheme = n.fetch("nameIdentifierScheme", nil) - scheme_uri = n.fetch("schemeURI", nil) || IDENTIFIER_SCHEME_URIS.fetch(scheme, "https://orcid.org") - scheme_uri = "https://orcid.org/" if validate_orcid_scheme(scheme_uri) - scheme_uri << "/" unless scheme_uri.present? && scheme_uri.end_with?("/") - - identifier = n.fetch("__content__", nil) - identifier = if scheme_uri == "https://orcid.org/" - validate_orcid(identifier) - else - identifier.gsub(" ", "-") - end - - if identifier.present? && scheme_uri.present? - sum << scheme_uri + identifier - else - sum + name_identifiers = + Array.wrap(author.fetch("nameIdentifier", nil)).reduce([]) do |sum, n| + n = { "__content__" => n } if n.is_a?(String) + + # fetch scheme_uri, default to ORCID + scheme = n.fetch("nameIdentifierScheme", nil) + scheme_uri = + n.fetch("schemeURI", nil) || + IDENTIFIER_SCHEME_URIS.fetch(scheme, "https://orcid.org") + scheme_uri = "https://orcid.org/" if validate_orcid_scheme(scheme_uri) + unless scheme_uri.present? && scheme_uri.end_with?("/") + scheme_uri << "/" + end + + identifier = n.fetch("__content__", nil) + identifier = + if scheme_uri == "https://orcid.org/" + validate_orcid(identifier) + else + identifier.gsub(" ", "-") + end + + if identifier.present? && scheme_uri.present? 
+ sum << scheme_uri + identifier + else + sum + end end - end # return array of name identifiers, ORCID ID is first element if multiple name_identifiers.select { |n| n.start_with?("https://orcid.org") } + diff --git a/app/models/concerns/batch_loader_helper.rb b/app/models/concerns/batch_loader_helper.rb index 8829ff768..9329c25b2 100644 --- a/app/models/concerns/batch_loader_helper.rb +++ b/app/models/concerns/batch_loader_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module BatchLoaderHelper extend ActiveSupport::Concern ### TODO: remove after benchmark diff --git a/app/models/concerns/cacheable.rb b/app/models/concerns/cacheable.rb index 48e8b0f74..c0e1f7c8c 100644 --- a/app/models/concerns/cacheable.rb +++ b/app/models/concerns/cacheable.rb @@ -1,43 +1,55 @@ +# frozen_string_literal: true + module Cacheable extend ActiveSupport::Concern included do def cached_metadata_count(options = {}) - Rails.cache.fetch("cached_metadata_count/#{id}", expires_in: 6.hours, force: options[:force]) do + Rails.cache.fetch( + "cached_metadata_count/#{id}", + expires_in: 6.hours, force: options[:force], + ) do return [] if Rails.env.test? - collection = if self.class.name == "Doi" - Metadata.where(dataset: id) - else - Metadata - end + collection = + instance_of?(Doi) ? Metadata.where(dataset: id) : Metadata - years = collection.order("YEAR(metadata.created)").group("YEAR(metadata.created)").count + years = + collection.order("YEAR(metadata.created)").group( + "YEAR(metadata.created)", + ). + count years = years.map { |k, v| { id: k, title: k, count: v } } end end def cached_media_count(options = {}) - Rails.cache.fetch("cached_media_count/#{id}", expires_in: 6.hours, force: options[:force]) do + Rails.cache.fetch( + "cached_media_count/#{id}", + expires_in: 6.hours, force: options[:force], + ) do return [] if Rails.env.test? - if self.class.name == "Doi" + if instance_of?(Doi) collection = Media.where(dataset: id) return [] if collection.blank? 
else collection = Media end - years = collection.order("YEAR(media.created)").group("YEAR(media.created)").count + years = + collection.order("YEAR(media.created)").group("YEAR(media.created)"). + count years = years.map { |k, v| { id: k, title: k, count: v } } end end def cached_prefixes_totals(params = {}) if Rails.application.config.action_controller.perform_caching - Rails.cache.fetch("cached_prefixes_totals/#{params}", expires_in: 24.hours) do - prefixes_totals params - end + Rails.cache.fetch( + "cached_prefixes_totals/#{params}", + expires_in: 24.hours, + ) { prefixes_totals params } else prefixes_totals params end @@ -88,7 +100,11 @@ def cached_metadata_count Rails.cache.fetch("cached_metadata_count", expires_in: 6.hours) do return [] if Rails.env.test? - years = Metadata.order("YEAR(metadata.created)").group("YEAR(metadata.created)").count + years = + Metadata.order("YEAR(metadata.created)").group( + "YEAR(metadata.created)", + ). + count years = years.map { |k, v| { id: k, title: k, count: v } } end end @@ -97,7 +113,8 @@ def cached_media_count Rails.cache.fetch("cached_media_count", expires_in: 6.hours) do return [] if Rails.env.test? - years = Media.order("YEAR(media.created)").group("YEAR(media.created)").count + years = + Media.order("YEAR(media.created)").group("YEAR(media.created)").count years = years.map { |k, v| { id: k, title: k, count: v } } end end diff --git a/app/models/concerns/crosscitable.rb b/app/models/concerns/crosscitable.rb index 3909c86c4..92b73a436 100644 --- a/app/models/concerns/crosscitable.rb +++ b/app/models/concerns/crosscitable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Crosscitable extend ActiveSupport::Concern @@ -46,12 +48,22 @@ def parse_xml(input, options = {}) from = find_from_format(string: input) end - meta = from.present? ? send("read_" + from, string: input, doi: options[:doi], sandbox: sandbox).compact : {} + meta = + if from.present? 
+ send( + "read_" + from, + string: input, doi: options[:doi], sandbox: sandbox, + ). + compact + else + {} + end meta.merge("string" => input, "from" => from) rescue NoMethodError, ArgumentError => e Raven.capture_exception(e) - Rails.logger.error "Error " + e.message.to_s + " for doi " + @doi.to_s + "." + Rails.logger.error "Error " + e.message.to_s + " for doi " + @doi.to_s + + "." Rails.logger.error exception.inspect {} @@ -60,9 +72,11 @@ def parse_xml(input, options = {}) def replace_doi(input, options = {}) return input if options[:doi].blank? - doc = Nokogiri::XML(input, nil, "UTF-8", &:noblanks) + doc = Nokogiri.XML(input, nil, "UTF-8", &:noblanks) node = doc.at_css("identifier") - node.content = options[:doi].to_s.upcase if node.present? && options[:doi].present? + if node.present? && options[:doi].present? + node.content = options[:doi].to_s.upcase + end doc.to_xml.strip end @@ -81,11 +95,41 @@ def update_xml end # generate new xml if attributes have been set directly and/or from metadata that are not DataCite XML - read_attrs = %w(creators contributors titles publisher publication_year types descriptions container sizes formats version_info language dates identifiers related_identifiers funding_references geo_locations rights_list subjects content_url schema_version).map do |a| - [a.to_sym, send(a.to_s)] - end.to_h.compact - - meta = from.present? ? send("read_" + from, { string: xml, doi: doi, sandbox: sandbox }.merge(read_attrs)) : {} + read_attrs = + %w[ + creators + contributors + titles + publisher + publication_year + types + descriptions + container + sizes + formats + version_info + language + dates + identifiers + related_identifiers + funding_references + geo_locations + rights_list + subjects + content_url + schema_version + ].map { |a| [a.to_sym, send(a.to_s)] }.to_h. + compact + + meta = + if from.present? 
+ send( + "read_" + from, + { string: xml, doi: doi, sandbox: sandbox }.merge(read_attrs), + ) + else + {} + end xml = datacite_xml @@ -101,7 +145,7 @@ def clean_xml(string) rescue ArgumentError, Encoding::CompatibilityError => e # convert utf-16 to utf-8 string = string.force_encoding("UTF-16").encode("UTF-8") - string.gsub!('encoding="UTF-16"', 'encoding="UTF-8"') + string.gsub!("encoding=\"UTF-16\"", "encoding=\"UTF-8\"") end # remove optional bom @@ -111,10 +155,13 @@ def clean_xml(string) string = string.strip # handle missing and additional namespace - return nil unless string.start_with?(" e Rails.logger.error "Error " + e.message + "." @@ -134,7 +181,7 @@ def well_formed_xml(string) def from_xml(string) return nil unless string.start_with?(" date_type } + dd = + Array.wrap(dates).detect { |d| d["dateType"] == date_type } || + { "dateType" => date_type } dd["date"] = date end @@ -24,8 +28,11 @@ def set_resource_type(types, text, type) module ClassMethods def get_solr_date_range(from_date, until_date) from_date_string = get_datetime_from_input(from_date) || "*" - until_date_string = get_datetime_from_input(until_date, until_date: true) || "*" - until_date_string = get_datetime_from_input(from_date, until_date: true) if until_date_string != "*" && until_date_string < from_date_string + until_date_string = + get_datetime_from_input(until_date, until_date: true) || "*" + if until_date_string != "*" && until_date_string < from_date_string + until_date_string = get_datetime_from_input(from_date, until_date: true) + end "[" + from_date_string + " TO " + until_date_string + "]" end @@ -42,7 +49,12 @@ def get_datetime_from_input(iso8601_time, options = {}) def get_date_from_parts(year, month = nil, day = nil) return nil if year.blank? 
- iso8601_time = [year.to_s.rjust(4, "0"), month.to_s.rjust(2, "0"), day.to_s.rjust(2, "0")].reject { |part| part == "00" }.join("-") + iso8601_time = + [ + year.to_s.rjust(4, "0"), + month.to_s.rjust(2, "0"), + day.to_s.rjust(2, "0"), + ].reject { |part| part == "00" }.join("-") get_datetime_from_iso8601(iso8601_time) end @@ -54,7 +66,8 @@ def get_datetime_from_iso8601(iso8601_time, options = {}) if iso8601_time[8..9].present? ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_day.iso8601 elsif iso8601_time[5..6].present? - ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_month.iso8601 + ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_month. + iso8601 else ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_year.iso8601 end diff --git a/app/models/concerns/helpable.rb b/app/models/concerns/helpable.rb index 4877a1757..8ddb51bb2 100644 --- a/app/models/concerns/helpable.rb +++ b/app/models/concerns/helpable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Helpable extend ActiveSupport::Concern @@ -6,7 +8,7 @@ module Helpable require "securerandom" require "base32/url" - UPPER_LIMIT = 1073741823 + UPPER_LIMIT = 1_073_741_823 included do include Bolognese::Utils @@ -14,15 +16,18 @@ module Helpable def register_url if url.blank? - raise ActionController::BadRequest.new, "[Handle] Error updating DOI " + doi + ": url missing." + raise ActionController::BadRequest.new, + "[Handle] Error updating DOI " + doi + ": url missing." end if client_id.blank? - raise ActionController::BadRequest.new, "[Handle] Error updating DOI " + doi + ": client ID missing." + raise ActionController::BadRequest.new, + "[Handle] Error updating DOI " + doi + ": client ID missing." end unless is_registered_or_findable? - raise ActionController::BadRequest.new, "DOI is not registered or findable." + raise ActionController::BadRequest.new, + "DOI is not registered or findable." 
end payload = [ @@ -41,31 +46,48 @@ def register_url { "index" => 1, "type" => "URL", - "data" => { - "format" => "string", - "value" => url, - }, + "data" => { "format" => "string", "value" => url }, }, ].to_json handle_url = "#{ENV['HANDLE_URL']}/api/handles/#{doi}" - response = Maremma.put(handle_url, content_type: "application/json;charset=UTF-8", data: payload, username: "300%3A#{ENV['HANDLE_USERNAME']}", password: ENV["HANDLE_PASSWORD"], ssl_self_signed: true, timeout: 10) + response = + Maremma.put( + handle_url, + content_type: "application/json;charset=UTF-8", + data: payload, + username: "300%3A#{ENV['HANDLE_USERNAME']}", + password: ENV["HANDLE_PASSWORD"], + ssl_self_signed: true, + timeout: 10, + ) + + # update minted column after first successful registration in handle system if [200, 201].include?(response.status) - # update minted column after first successful registration in handle system success = true - success = update(minted: Time.zone.now, updated: Time.zone.now) if minted.blank? - Rails.logger.info "[Handle] URL for DOI " + doi + " updated to " + url + "." unless Rails.env.test? - - if success - __elasticsearch__.index_document + if minted.blank? + success = update(minted: Time.zone.now, updated: Time.zone.now) + end + unless Rails.env.test? + Rails.logger.info "[Handle] URL for DOI " + doi + " updated to " + + url + + "." end + + __elasticsearch__.index_document if success elsif response.status == 404 - Rails.logger.info "[Handle] Error updating URL for DOI " + doi + ": not found" + Rails.logger.info "[Handle] Error updating URL for DOI " + doi + + ": not found" elsif response.status == 408 - Rails.logger.warn "[Handle] Error updating URL for DOI " + doi + ": timeout" + Rails.logger.warn "[Handle] Error updating URL for DOI " + doi + + ": timeout" else - Rails.logger.error "[Handle] Error updating URL for DOI " + doi + ": " + response.body.inspect unless Rails.env.test? + unless Rails.env.test? 
+ Rails.logger.error "[Handle] Error updating URL for DOI " + doi + + ": " + + response.body.inspect + end end response @@ -76,7 +98,11 @@ def get_url response = Maremma.get(url, ssl_self_signed: true, timeout: 10) if response.status != 200 - Rails.logger.error "[Handle] Error fetching URL for DOI " + doi + ": " + response.body.inspect unless Rails.env.test? + unless Rails.env.test? + Rails.logger.error "[Handle] Error fetching URL for DOI " + doi + + ": " + + response.body.inspect + end end response @@ -95,7 +121,10 @@ def generate_random_dois(str, options = {}) fail IdentifierError, "No valid prefix found" if prefix.blank? shoulder = str.split("/", 2)[1].to_s - encode_doi(prefix, shoulder: shoulder, number: options[:number], size: options[:size]) + encode_doi( + prefix, + shoulder: shoulder, number: options[:number], size: options[:size], + ) end def encode_doi(prefix, options = {}) @@ -110,7 +139,8 @@ def encode_doi(prefix, options = {}) Array.new(size).map do |_a| n = number.positive? ? number : SecureRandom.random_number(UPPER_LIMIT) - prefix.to_s + "/" + shoulder + Base32::URL.encode(n, split: split, length: length, checksum: true) + prefix.to_s + "/" + shoulder + + Base32::URL.encode(n, split: split, length: length, checksum: true) end.uniq end @@ -131,8 +161,9 @@ def match_url_with_domains(url: nil, domains: nil) uri = Addressable::URI.parse(url) domain_list = domains.split(",") domain_list.any? do |d| - # strip asterix for subdomain if d.starts_with?("*.") + # strip asterix for subdomain + d = d[1..-1] uri.host.ends_with?(d) else @@ -144,28 +175,56 @@ def match_url_with_domains(url: nil, domains: nil) module ClassMethods def get_dois(options = {}) - return OpenStruct.new(body: { "errors" => [{ "title" => "Prefix missing" }] }) if options[:prefix].blank? + if options[:prefix].blank? 
+ return OpenStruct.new( + body: { "errors" => [{ "title" => "Prefix missing" }] }, + ) + end - count_url = ENV["HANDLE_URL"] + "/api/handles?prefix=#{options[:prefix]}&pageSize=0" - response = Maremma.get(count_url, username: "300%3A#{ENV['HANDLE_USERNAME']}", password: ENV["HANDLE_PASSWORD"], ssl_self_signed: true, timeout: 60) + count_url = + ENV["HANDLE_URL"] + "/api/handles?prefix=#{options[:prefix]}&pageSize=0" + response = + Maremma.get( + count_url, + username: "300%3A#{ENV['HANDLE_USERNAME']}", + password: ENV["HANDLE_PASSWORD"], + ssl_self_signed: true, + timeout: 60, + ) total = response.body.dig("data", "totalCount").to_i dois = [] if total > 0 # walk through paginated results - total_pages = (total.to_f / 1000).ceil + total_pages = (total.to_f / 1_000).ceil (0...total_pages).each do |page| - url = ENV["HANDLE_URL"] + "/api/handles?prefix=#{options[:prefix]}&page=#{page}&pageSize=1000" - response = Maremma.get(url, username: "300%3A#{ENV['HANDLE_USERNAME']}", password: ENV["HANDLE_PASSWORD"], ssl_self_signed: true, timeout: 60) + url = + ENV["HANDLE_URL"] + + "/api/handles?prefix=#{options[:prefix]}&page=#{ + page + }&pageSize=1000" + response = + Maremma.get( + url, + username: "300%3A#{ENV['HANDLE_USERNAME']}", + password: ENV["HANDLE_PASSWORD"], + ssl_self_signed: true, + timeout: 60, + ) if response.status == 200 dois += (response.body.dig("data", "handles") || []) else text = "Error " + response.body["errors"].inspect Rails.logger.error "[Handle] " + text - User.send_notification_to_slack(text, title: "Error #{response.status}", level: "danger") unless Rails.env.test? + unless Rails.env.test? + User.send_notification_to_slack( + text, + title: "Error #{response.status}", level: "danger", + ) + end end end end @@ -176,41 +235,99 @@ def get_dois(options = {}) end def get_doi(options = {}) - return OpenStruct.new(body: { "errors" => [{ "title" => "DOI missing" }] }) if options[:doi].blank? + if options[:doi].blank? 
+ return OpenStruct.new( + body: { "errors" => [{ "title" => "DOI missing" }] }, + ) + end - url = Rails.env.production? ? "https://doi.org" : "https://handle.test.datacite.org" + url = + if Rails.env.production? + "https://doi.org" + else + "https://handle.test.datacite.org" + end url += "/api/handles/#{options[:doi]}" - response = Maremma.get(url, username: "300%3A#{ENV['HANDLE_USERNAME']}", password: ENV["HANDLE_PASSWORD"], ssl_self_signed: true, timeout: 10) + response = + Maremma.get( + url, + username: "300%3A#{ENV['HANDLE_USERNAME']}", + password: ENV["HANDLE_PASSWORD"], + ssl_self_signed: true, + timeout: 10, + ) if response.status == 200 response elsif response.status == 404 - OpenStruct.new(status: 404, body: { "errors" => [{ "status" => 404, "title" => "Not found." }] }) + OpenStruct.new( + status: 404, + body: { "errors" => [{ "status" => 404, "title" => "Not found." }] }, + ) else text = "Error " + response.body["errors"].inspect Rails.logger.error "[Handle] " + text - User.send_notification_to_slack(text, title: "Error #{response.status}", level: "danger") unless Rails.env.test? - OpenStruct.new(status: 400, body: { "errors" => [{ "status" => 400, "title" => response.body["errors"].inspect }] }) + unless Rails.env.test? + User.send_notification_to_slack( + text, + title: "Error #{response.status}", level: "danger", + ) + end + OpenStruct.new( + status: 400, + body: { + "errors" => [ + { "status" => 400, "title" => response.body["errors"].inspect }, + ], + }, + ) end end def delete_doi(options = {}) - return OpenStruct.new(body: { "errors" => [{ "title" => "DOI missing" }] }) if options[:doi].blank? - return OpenStruct.new(body: { "errors" => [{ "title" => "Only DOIs with prefix 10.5072 can be deleted" }] }) unless options[:doi].start_with?("10.5072") + if options[:doi].blank? 
+ return OpenStruct.new( + body: { "errors" => [{ "title" => "DOI missing" }] }, + ) + end + unless options[:doi].start_with?("10.5072") + return OpenStruct.new( + body: { + "errors" => [ + { "title" => "Only DOIs with prefix 10.5072 can be deleted" }, + ], + }, + ) + end url = "#{ENV['HANDLE_URL']}/api/handles/#{options[:doi]}" - response = Maremma.delete(url, username: "300%3A#{ENV['HANDLE_USERNAME']}", password: ENV["HANDLE_PASSWORD"], ssl_self_signed: true, timeout: 10) + response = + Maremma.delete( + url, + username: "300%3A#{ENV['HANDLE_USERNAME']}", + password: ENV["HANDLE_PASSWORD"], + ssl_self_signed: true, + timeout: 10, + ) if response.status == 200 response elsif response.status == 404 - OpenStruct.new(status: 404, body: { "errors" => [{ "status" => 404, "title" => "Not found." }] }) + OpenStruct.new( + status: 404, + body: { "errors" => [{ "status" => 404, "title" => "Not found." }] }, + ) else text = "Error " + response.body["errors"].inspect Rails.logger.error "[Handle] " + text - User.send_notification_to_slack(text, title: "Error #{response.status}", level: "danger") unless Rails.env.test? + unless Rails.env.test? + User.send_notification_to_slack( + text, + title: "Error #{response.status}", level: "danger", + ) + end response end end @@ -223,7 +340,10 @@ def parse_attributes(element, options = {}) elsif element.is_a?(Hash) element.fetch(CGI.unescapeHTML(content), nil) elsif element.is_a?(Array) - a = element.map { |e| e.is_a?(Hash) ? e.fetch(CGI.unescapeHTML(content), nil) : e }.uniq + a = + element.map do |e| + e.is_a?(Hash) ? e.fetch(CGI.unescapeHTML(content), nil) : e + end.uniq options[:first] ? 
a.first : a.unwrap end end diff --git a/app/models/concerns/identifiable.rb b/app/models/concerns/identifiable.rb index eeddf73c5..d7eda3d9b 100644 --- a/app/models/concerns/identifiable.rb +++ b/app/models/concerns/identifiable.rb @@ -1,9 +1,16 @@ +# frozen_string_literal: true + module Identifiable extend ActiveSupport::Concern included do def normalize_doi(doi) - doi = Array(/\A(?:(http|https):\/(\/)?(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match(doi)).last + doi = + Array( + %r{\A(?:(http|https):/(/)?(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match(doi), + ). + last # remove non-printing whitespace and downcase doi = doi.delete("\u200B").downcase if doi.present? @@ -20,7 +27,11 @@ def get_doi_ra(prefix) end def validate_prefix(doi) - Array(/\A(?:(http|https):\/(\/)?(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}).*\z/.match(doi)).last + Array( + %r{\A(?:(http|https):/(/)?(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}).*\z}. + match(doi), + ). + last end end @@ -35,19 +46,29 @@ def get_doi_ra(prefix) end def validate_doi(doi) - doi = Array(/\A(?:(http|https):\/(\/)?(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match(doi)).last + doi = + Array( + %r{\A(?:(http|https):/(/)?(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match(doi), + ). + last # remove non-printing whitespace and downcase doi.delete("\u200B").downcase if doi.present? end def validate_prefix(doi) - Array(/\A(?:(http|https):\/(\/)?(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}).*\z/.match(doi)).last + Array( + %r{\A(?:(http|https):/(/)?(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}).*\z}. + match(doi), + ). 
+ last end def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end end diff --git a/app/models/concerns/indexable.rb b/app/models/concerns/indexable.rb index ec7ed11f4..36828769b 100644 --- a/app/models/concerns/indexable.rb +++ b/app/models/concerns/indexable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Indexable extend ActiveSupport::Concern @@ -6,11 +8,23 @@ module Indexable included do after_commit on: %i[create update] do # use index_document instead of update_document to also update virtual attributes - IndexJob.perform_later(self) unless ["ProviderPrefix", "ClientPrefix"].include?(self.class.name) + unless %w[ProviderPrefix ClientPrefix].include?(self.class.name) + IndexJob.perform_later(self) + end - if (self.class.name == "DataciteDoi" || self.class.name == "OtherDoi" || self.class.name == "Doi") && (saved_change_to_attribute?("related_identifiers") || saved_change_to_attribute?("creators") || saved_change_to_attribute?("funding_references")) - send_import_message(to_jsonapi) if aasm_state == "findable" && !Rails.env.test? - elsif self.class.name == "Event" + if ( + instance_of?(DataciteDoi) || instance_of?(OtherDoi) || + instance_of?(Doi) + ) && + ( + saved_change_to_attribute?("related_identifiers") || + saved_change_to_attribute?("creators") || + saved_change_to_attribute?("funding_references") + ) + if aasm_state == "findable" && !Rails.env.test? 
+ send_import_message(to_jsonapi) + end + elsif instance_of?(Event) OtherDoiJob.perform_later(dois_to_import) end end @@ -20,18 +34,22 @@ module Indexable IndexBackgroundJob.perform_later(self) end - after_commit on: [:destroy] do - begin - __elasticsearch__.delete_document unless ["ProviderPrefix", "ClientPrefix"].include?(self.class.name) - if self.class.name == "Event" - Rails.logger.info "#{self.class.name} #{uuid} deleted from Elasticsearch index." - else - Rails.logger.info "#{self.class.name} #{uid} deleted from Elasticsearch index." - end - # send_delete_message(self.to_jsonapi) if self.class.name == "Doi" && !Rails.env.test? - rescue Elasticsearch::Transport::Transport::Errors::NotFound => e - Rails.logger.error e.message + after_commit on: %i[destroy] do + unless %w[ProviderPrefix ClientPrefix].include?(self.class.name) + __elasticsearch__.delete_document end + if instance_of?(Event) + Rails.logger.info "#{self.class.name} #{ + uuid + } deleted from Elasticsearch index." + else + Rails.logger.info "#{self.class.name} #{ + uid + } deleted from Elasticsearch index." + end + # send_delete_message(self.to_jsonapi) if self.class.name == "Doi" && !Rails.env.test? + rescue Elasticsearch::Transport::Transport::Errors::NotFound => e + Rails.logger.error e.message end def send_delete_message(data) @@ -46,20 +64,21 @@ def send_import_message(data) # we use the AWS SQS client directly as there is no consumer in this app def send_message(body, options = {}) sqs = Aws::SQS::Client.new - queue_name_prefix = if Rails.env.stage? - ENV["ES_PREFIX"].present? ? "stage" : "test" - else - Rails.env - end - queue_url = sqs.get_queue_url(queue_name: "#{queue_name_prefix}_doi").queue_url + queue_name_prefix = + if Rails.env.stage? + ENV["ES_PREFIX"].present? ? 
"stage" : "test" + else + Rails.env + end + queue_url = + sqs.get_queue_url(queue_name: "#{queue_name_prefix}_doi").queue_url options[:shoryuken_class] ||= "DoiImportWorker" options = { queue_url: queue_url, message_attributes: { "shoryuken_class" => { - string_value: options[:shoryuken_class], - data_type: "String", + string_value: options[:shoryuken_class], data_type: "String" }, }, message_body: body.to_json, @@ -69,7 +88,7 @@ def send_message(body, options = {}) end def ror_from_url(url) - ror = Array(/\A(?:(http|https):\/\/)?(ror\.org\/)?(.+)/.match(url)).last + ror = Array(%r{\A(?:(http|https)://)?(ror\.org/)?(.+)}.match(url)).last "ror.org/#{ror}" if ror.present? end end @@ -80,12 +99,12 @@ def find_by_id(ids, options = {}) ids = ids.split(",") if ids.is_a?(String) options[:page] ||= {} options[:page][:number] ||= 1 - options[:page][:size] ||= 2000 + options[:page][:size] ||= 2_000 - if ["Prefix", "ProviderPrefix", "ClientPrefix"].include?(name) - options[:sort] ||= { created_at: { order: "asc" } } + options[:sort] ||= if %w[Prefix ProviderPrefix ClientPrefix].include?(name) + { created_at: { order: "asc" } } else - options[:sort] ||= { created: { order: "asc" } } + { created: { order: "asc" } } end __elasticsearch__.search( @@ -93,11 +112,7 @@ def find_by_id(ids, options = {}) size: options.dig(:page, :size), sort: [options[:sort]], track_total_hits: true, - query: { - terms: { - symbol: ids.map(&:upcase), - }, - }, + query: { terms: { symbol: ids.map(&:upcase) } }, aggregations: query_aggregations, ) end @@ -106,15 +121,16 @@ def find_by_id_list(ids, options = {}) options[:sort] ||= { "_doc" => { order: "asc" } } __elasticsearch__.search( - from: options[:page].present? ? (options.dig(:page, :number) - 1) * options.dig(:page, :size) : 0, + from: + if options[:page].present? 
+ (options.dig(:page, :number) - 1) * options.dig(:page, :size) + else + 0 + end, size: options[:size] || 25, sort: [options[:sort]], track_total_hits: true, - query: { - terms: { - id: ids.split(","), - }, - }, + query: { terms: { id: ids.split(",") } }, aggregations: query_aggregations, ) end @@ -124,35 +140,36 @@ def query(query, options = {}) # map function is small performance hit if options[:scroll_id].present? && options.dig(:page, :scroll) begin - response = __elasticsearch__.client.scroll(body: - { scroll_id: options[:scroll_id], - scroll: options.dig(:page, :scroll) }) + response = + __elasticsearch__.client.scroll( + body: { + scroll_id: options[:scroll_id], + scroll: options.dig(:page, :scroll), + }, + ) return Hashie::Mash.new( total: response.dig("hits", "total", "value"), results: response.dig("hits", "hits").map { |r| r["_source"] }, scroll_id: response["_scroll_id"], ) - # handle expired scroll_id (Elasticsearch returns this error) + # handle expired scroll_id (Elasticsearch returns this error) rescue Elasticsearch::Transport::Transport::Errors::NotFound - return Hashie::Mash.new( - total: 0, - results: [], - scroll_id: nil, - ) + return Hashie::Mash.new(total: 0, results: [], scroll_id: nil) end end - aggregations = if options[:totals_agg] == "provider" - provider_aggregations - elsif options[:totals_agg] == "client" - client_aggregations - elsif options[:totals_agg] == "client_export" - client_export_aggregations - elsif options[:totals_agg] == "prefix" - prefix_aggregations - else - query_aggregations - end + aggregations = + if options[:totals_agg] == "provider" + provider_aggregations + elsif options[:totals_agg] == "client" + client_aggregations + elsif options[:totals_agg] == "client_export" + client_export_aggregations + elsif options[:totals_agg] == "prefix" + prefix_aggregations + else + query_aggregations + end options[:page] ||= {} options[:page][:number] ||= 1 @@ -172,19 +189,22 @@ def query(query, options = {}) search_after = cursor 
from = 0 - sort = if name == "Event" - [{ created_at: "asc", uuid: "asc" }] - elsif name == "Activity" - [{ created: "asc", request_uuid: "asc" }] - elsif %w(Client Provider).include?(name) - [{ created: "asc", uid: "asc" }] - elsif %w(Prefix ProviderPrefix ClientPrefix).include?(name) - [{ created_at: "asc", uid: "asc" }] - else - [{ created: "asc" }] - end + sort = + if name == "Event" + [{ created_at: "asc", uuid: "asc" }] + elsif name == "Activity" + [{ created: "asc", request_uuid: "asc" }] + elsif %w[Client Provider].include?(name) + [{ created: "asc", uid: "asc" }] + elsif %w[Prefix ProviderPrefix ClientPrefix].include?(name) + [{ created_at: "asc", uid: "asc" }] + else + [{ created: "asc" }] + end else - from = ((options.dig(:page, :number) || 1) - 1) * (options.dig(:page, :size) || 25) + from = + ((options.dig(:page, :number) || 1) - 1) * + (options.dig(:page, :size) || 25) search_after = nil sort = options[:sort] end @@ -199,123 +219,378 @@ def query(query, options = {}) query = query.gsub(/geoLocations/, "geo_locations") query = query.gsub(/landingPage/, "landing_page") query = query.gsub(/contentUrl/, "content_url") - query = query.gsub("/", '\/') + query = query.gsub("/", "\\/") must_not = [] filter = [] # filters for some classes + if name == "Provider" - if query.present? - must = [{ query_string: { query: query, fields: query_fields, default_operator: "AND", phrase_slop: 1 } }] + must = if query.present? + [ + { + query_string: { + query: query, + fields: query_fields, + default_operator: "AND", + phrase_slop: 1, + }, + }, + ] else - must = [{ match_all: {} }] + [{ match_all: {} }] end - filter << { range: { created: { gte: "#{options[:year].split(',').min}||/y", lte: "#{options[:year].split(',').max}||/y", format: "yyyy" } } } if options[:year].present? - filter << { range: { updated: { gte: "#{options[:from_date]}||/d" } } } if options[:from_date].present? 
- filter << { range: { updated: { lte: "#{options[:until_date]}||/d" } } } if options[:until_date].present? - filter << { term: { region: options[:region].upcase } } if options[:region].present? - filter << { term: { "consortium_id.raw" => options[:consortium_id] } } if options[:consortium_id].present? - filter << { terms: { member_type: options[:member_type].split(",") } } if options[:member_type].present? - filter << { terms: { organization_type: options[:organization_type].split(",") } } if options[:organization_type].present? - filter << { term: { non_profit_status: options[:non_profit_status] } } if options[:non_profit_status].present? - filter << { terms: { focus_area: options[:focus_area].split(",") } } if options[:focus_area].present? + if options[:year].present? + filter << + { + range: { + created: { + gte: "#{options[:year].split(',').min}||/y", + lte: "#{options[:year].split(',').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:from_date].present? + filter << + { range: { updated: { gte: "#{options[:from_date]}||/d" } } } + end + if options[:until_date].present? + filter << + { range: { updated: { lte: "#{options[:until_date]}||/d" } } } + end + if options[:region].present? + filter << { term: { region: options[:region].upcase } } + end + if options[:consortium_id].present? + filter << { term: { "consortium_id.raw" => options[:consortium_id] } } + end + if options[:member_type].present? + filter << { terms: { member_type: options[:member_type].split(",") } } + end + if options[:organization_type].present? + filter << + { + terms: { + organization_type: options[:organization_type].split(","), + }, + } + end + if options[:non_profit_status].present? + filter << { term: { non_profit_status: options[:non_profit_status] } } + end + if options[:focus_area].present? 
+ filter << { terms: { focus_area: options[:focus_area].split(",") } } + end - must_not << { exists: { field: "deleted_at" } } unless options[:include_deleted] + unless options[:include_deleted] + must_not << { exists: { field: "deleted_at" } } + end must_not << { term: { role_name: "ROLE_ADMIN" } } elsif name == "Client" - if query.present? - must = [{ query_string: { query: query, fields: query_fields, default_operator: "AND", phrase_slop: 1 } }] + must = if query.present? + [ + { + query_string: { + query: query, + fields: query_fields, + default_operator: "AND", + phrase_slop: 1, + }, + }, + ] else - must = [{ match_all: {} }] - end - - filter << { range: { created: { gte: "#{options[:year].split(',').min}||/y", lte: "#{options[:year].split(',').max}||/y", format: "yyyy" } } } if options[:year].present? - filter << { range: { updated: { gte: "#{options[:from_date]}||/d" } } } if options[:from_date].present? - filter << { range: { updated: { lte: "#{options[:until_date]}||/d" } } } if options[:until_date].present? - filter << { terms: { provider_id: options[:provider_id].split(",") } } if options[:provider_id].present? - filter << { terms: { "software.raw" => options[:software].split(",") } } if options[:software].present? - filter << { terms: { certificate: options[:certificate].split(",") } } if options[:certificate].present? - filter << { terms: { repository_type: options[:repository_type].split(",") } } if options[:repository_type].present? - filter << { term: { consortium_id: options[:consortium_id] } } if options[:consortium_id].present? - filter << { term: { re3data_id: options[:re3data_id].gsub("/", '\/').upcase } } if options[:re3data_id].present? - filter << { term: { opendoar_id: options[:opendoar_id] } } if options[:opendoar_id].present? - filter << { term: { client_type: options[:client_type] } } if options[:client_type].present? 
- must_not << { exists: { field: "deleted_at" } } unless options[:include_deleted] + [{ match_all: {} }] + end + + if options[:year].present? + filter << + { + range: { + created: { + gte: "#{options[:year].split(',').min}||/y", + lte: "#{options[:year].split(',').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:from_date].present? + filter << + { range: { updated: { gte: "#{options[:from_date]}||/d" } } } + end + if options[:until_date].present? + filter << + { range: { updated: { lte: "#{options[:until_date]}||/d" } } } + end + if options[:provider_id].present? + filter << { terms: { provider_id: options[:provider_id].split(",") } } + end + if options[:software].present? + filter << + { terms: { "software.raw" => options[:software].split(",") } } + end + if options[:certificate].present? + filter << { terms: { certificate: options[:certificate].split(",") } } + end + if options[:repository_type].present? + filter << + { terms: { repository_type: options[:repository_type].split(",") } } + end + if options[:consortium_id].present? + filter << { term: { consortium_id: options[:consortium_id] } } + end + if options[:re3data_id].present? + filter << + { + term: { re3data_id: options[:re3data_id].gsub("/", "\\/").upcase }, + } + end + if options[:opendoar_id].present? + filter << { term: { opendoar_id: options[:opendoar_id] } } + end + if options[:client_type].present? + filter << { term: { client_type: options[:client_type] } } + end + unless options[:include_deleted] + must_not << { exists: { field: "deleted_at" } } + end elsif name == "Event" - if query.present? - must = [{ query_string: { query: query, fields: query_fields, default_operator: "AND", phrase_slop: 1 } }] + must = if query.present? + [ + { + query_string: { + query: query, + fields: query_fields, + default_operator: "AND", + phrase_slop: 1, + }, + }, + ] else - must = [{ match_all: {} }] - end - - filter << { term: { subj_id: URI.decode(options[:subj_id]) } } if options[:subj_id].present? 
- filter << { term: { obj_id: URI.decode(options[:obj_id]) } } if options[:obj_id].present? - filter << { term: { citation_type: options[:citation_type] } } if options[:citation_type].present? - filter << { term: { year_month: options[:year_month] } } if options[:year_month].present? - filter << { range: { "subj.datePublished" => { gte: "#{options[:publication_year].split('-').min}||/y", lte: "#{options[:publication_year].split('-').max}||/y", format: "yyyy" } } } if options[:publication_year].present? - filter << { range: { occurred_at: { gte: "#{options[:occurred_at].split('-').min}||/y", lte: "#{options[:occurred_at].split('-').max}||/y", format: "yyyy" } } } if options[:occurred_at].present? - filter << { terms: { prefix: options[:prefix].split(",") } } if options[:prefix].present? - filter << { terms: { doi: options[:doi].downcase.split(",") } } if options[:doi].present? - filter << { terms: { source_doi: options[:source_doi].downcase.split(",") } } if options[:source_doi].present? - filter << { terms: { target_doi: options[:target_doi].downcase.split(",") } } if options[:target_doi].present? - filter << { terms: { orcid: options[:orcid].split(",") } } if options[:orcid].present? - filter << { terms: { isni: options[:isni].split(",") } } if options[:isni].present? - filter << { terms: { subtype: options[:subtype].split(",") } } if options[:subtype].present? - filter << { terms: { source_id: options[:source_id].split(",") } } if options[:source_id].present? - filter << { terms: { relation_type_id: options[:relation_type_id].split(",") } } if options[:relation_type_id].present? - filter << { terms: { source_relation_type_id: options[:source_relation_type_id].split(",") } } if options[:source_relation_type_id].present? - filter << { terms: { target_relation_type_id: options[:target_relation_type_id].split(",") } } if options[:target_relation_type_id].present? 
- filter << { terms: { registrant_id: options[:registrant_id].split(",") } } if options[:registrant_id].present? - filter << { terms: { registrant_id: options[:provider_id].split(",") } } if options[:provider_id].present? - filter << { terms: { issn: options[:issn].split(",") } } if options[:issn].present? - - must_not << { exists: { field: "target_doi" } } if options[:update_target_doi].present? + [{ match_all: {} }] + end + + if options[:subj_id].present? + filter << { term: { subj_id: URI.decode(options[:subj_id]) } } + end + if options[:obj_id].present? + filter << { term: { obj_id: URI.decode(options[:obj_id]) } } + end + if options[:citation_type].present? + filter << { term: { citation_type: options[:citation_type] } } + end + if options[:year_month].present? + filter << { term: { year_month: options[:year_month] } } + end + if options[:publication_year].present? + filter << + { + range: { + "subj.datePublished" => { + gte: "#{options[:publication_year].split('-').min}||/y", + lte: "#{options[:publication_year].split('-').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:occurred_at].present? + filter << + { + range: { + occurred_at: { + gte: "#{options[:occurred_at].split('-').min}||/y", + lte: "#{options[:occurred_at].split('-').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:prefix].present? + filter << { terms: { prefix: options[:prefix].split(",") } } + end + if options[:doi].present? + filter << { terms: { doi: options[:doi].downcase.split(",") } } + end + if options[:source_doi].present? + filter << + { terms: { source_doi: options[:source_doi].downcase.split(",") } } + end + if options[:target_doi].present? + filter << + { terms: { target_doi: options[:target_doi].downcase.split(",") } } + end + if options[:orcid].present? + filter << { terms: { orcid: options[:orcid].split(",") } } + end + if options[:isni].present? + filter << { terms: { isni: options[:isni].split(",") } } + end + if options[:subtype].present? 
+ filter << { terms: { subtype: options[:subtype].split(",") } } + end + if options[:source_id].present? + filter << { terms: { source_id: options[:source_id].split(",") } } + end + if options[:relation_type_id].present? + filter << + { + terms: { relation_type_id: options[:relation_type_id].split(",") }, + } + end + if options[:source_relation_type_id].present? + filter << + { + terms: { + source_relation_type_id: + options[:source_relation_type_id].split(","), + }, + } + end + if options[:target_relation_type_id].present? + filter << + { + terms: { + target_relation_type_id: + options[:target_relation_type_id].split(","), + }, + } + end + if options[:registrant_id].present? + filter << + { terms: { registrant_id: options[:registrant_id].split(",") } } + end + if options[:provider_id].present? + filter << + { terms: { registrant_id: options[:provider_id].split(",") } } + end + if options[:issn].present? + filter << { terms: { issn: options[:issn].split(",") } } + end + + if options[:update_target_doi].present? + must_not << { exists: { field: "target_doi" } } + end elsif name == "Prefix" - must = if query.present? - [{ prefix: { prefix: query } }] - else - [{ match_all: {} }] - end - - filter << { range: { created_at: { gte: "#{options[:year].split(',').min}||/y", lte: "#{options[:year].split(',').max}||/y", format: "yyyy" } } } if options[:year].present? - filter << { terms: { provider_ids: options[:provider_id].split(",") } } if options[:provider_id].present? - filter << { terms: { client_ids: options[:client_id].to_s.split(",") } } if options[:client_id].present? - filter << { terms: { state: options[:state].to_s.split(",") } } if options[:state].present? + must = + query.present? ? [{ prefix: { prefix: query } }] : [{ match_all: {} }] + + if options[:year].present? 
+ filter << + { + range: { + created_at: { + gte: "#{options[:year].split(',').min}||/y", + lte: "#{options[:year].split(',').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:provider_id].present? + filter << + { terms: { provider_ids: options[:provider_id].split(",") } } + end + if options[:client_id].present? + filter << + { terms: { client_ids: options[:client_id].to_s.split(",") } } + end + if options[:state].present? + filter << { terms: { state: options[:state].to_s.split(",") } } + end elsif name == "ProviderPrefix" - must = if query.present? - [{ prefix: { prefix_id: query } }] - else - [{ match_all: {} }] - end - - filter << { range: { created_at: { gte: "#{options[:year].split(',').min}||/y", lte: "#{options[:year].split(',').max}||/y", format: "yyyy" } } } if options[:year].present? - filter << { terms: { provider_id: options[:provider_id].split(",") } } if options[:provider_id].present? - filter << { terms: { provider_id: options[:consortium_organization_id].split(",") } } if options[:consortium_organization_id].present? - filter << { term: { consortium_id: options[:consortium_id] } } if options[:consortium_id].present? - filter << { term: { prefix_id: options[:prefix_id] } } if options[:prefix_id].present? - filter << { terms: { uid: options[:uid].to_s.split(",") } } if options[:uid].present? - filter << { terms: { state: options[:state].to_s.split(",") } } if options[:state].present? + must = + if query.present? + [{ prefix: { prefix_id: query } }] + else + [{ match_all: {} }] + end + + if options[:year].present? + filter << + { + range: { + created_at: { + gte: "#{options[:year].split(',').min}||/y", + lte: "#{options[:year].split(',').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:provider_id].present? + filter << { terms: { provider_id: options[:provider_id].split(",") } } + end + if options[:consortium_organization_id].present? 
+ filter << + { + terms: { + provider_id: options[:consortium_organization_id].split(","), + }, + } + end + if options[:consortium_id].present? + filter << { term: { consortium_id: options[:consortium_id] } } + end + if options[:prefix_id].present? + filter << { term: { prefix_id: options[:prefix_id] } } + end + if options[:uid].present? + filter << { terms: { uid: options[:uid].to_s.split(",") } } + end + if options[:state].present? + filter << { terms: { state: options[:state].to_s.split(",") } } + end elsif name == "ClientPrefix" - must = if query.present? - [{ prefix: { prefix_id: query } }] - else - [{ match_all: {} }] - end - - filter << { range: { created_at: { gte: "#{options[:year].split(',').min}||/y", lte: "#{options[:year].split(',').max}||/y", format: "yyyy" } } } if options[:year].present? - filter << { terms: { client_id: options[:client_id].split(",") } } if options[:client_id].present? - filter << { term: { prefix_id: options[:prefix_id] } } if options[:prefix_id].present? + must = + if query.present? + [{ prefix: { prefix_id: query } }] + else + [{ match_all: {} }] + end + + if options[:year].present? + filter << + { + range: { + created_at: { + gte: "#{options[:year].split(',').min}||/y", + lte: "#{options[:year].split(',').max}||/y", + format: "yyyy", + }, + }, + } + end + if options[:client_id].present? + filter << { terms: { client_id: options[:client_id].split(",") } } + end + if options[:prefix_id].present? + filter << { term: { prefix_id: options[:prefix_id] } } + end elsif name == "Activity" - if query.present? - must = [{ query_string: { query: query, fields: query_fields, default_operator: "AND", phrase_slop: 1 } }] + must = if query.present? + [ + { + query_string: { + query: query, + fields: query_fields, + default_operator: "AND", + phrase_slop: 1, + }, + }, + ] else - must = [{ match_all: {} }] + [{ match_all: {} }] end - filter << { terms: { uid: options[:uid].to_s.split(",") } } if options[:uid].present? + if options[:uid].present? 
+ filter << { terms: { uid: options[:uid].to_s.split(",") } } + end end # ES query can be optionally defined in different ways @@ -324,22 +599,17 @@ def query(query, options = {}) es_query = {} # The main bool query with filters - bool_query = { - must: must, - must_not: must_not, - filter: filter, - } + bool_query = { must: must, must_not: must_not, filter: filter } # Function score is used to provide varying score to return different values # We use the bool query above as our principle query # Then apply additional function scoring as appropriate # Note this can be performance intensive. function_score = { - query: { - bool: bool_query, - }, + query: { bool: bool_query }, random_score: { - "seed": Rails.env.test? ? "random_1234" : "random_#{rand(1...100000)}", + "seed": + Rails.env.test? ? "random_1234" : "random_#{rand(1...100_000)}", }, } @@ -354,15 +624,10 @@ def query(query, options = {}) # Sample grouping is optional included aggregation if options[:sample_group].present? aggregations[:samples] = { - terms: { - field: options[:sample_group], - size: 10000, - }, + terms: { field: options[:sample_group], size: 10_000 }, aggs: { "samples_hits": { - top_hits: { - size: options[:sample_size].presence || 1, - }, + top_hits: { size: options[:sample_size].presence || 1 }, }, }, } @@ -376,40 +641,45 @@ def query(query, options = {}) # can't use search wrapper function for scroll api # map function for scroll is small performance hit if options.dig(:page, :scroll).present? 
- response = __elasticsearch__.client.search( - index: index_name, - scroll: options.dig(:page, :scroll), - body: { + response = + __elasticsearch__.client.search( + index: index_name, + scroll: options.dig(:page, :scroll), + body: { + size: options.dig(:page, :size), + sort: sort, + query: es_query, + aggregations: aggregations, + track_total_hits: true, + }.compact, + ) + Hashie::Mash.new( + total: response.dig("hits", "total", "value"), + results: response.dig("hits", "hits").map { |r| r["_source"] }, + scroll_id: response["_scroll_id"], + ) + elsif options.fetch(:page, {}).key?(:cursor) + __elasticsearch__.search( + { size: options.dig(:page, :size), + search_after: search_after, sort: sort, query: es_query, aggregations: aggregations, track_total_hits: true, }.compact, ) - Hashie::Mash.new( - total: response.dig("hits", "total", "value"), - results: response.dig("hits", "hits").map { |r| r["_source"] }, - scroll_id: response["_scroll_id"], - ) - elsif options.fetch(:page, {}).key?(:cursor) - __elasticsearch__.search({ - size: options.dig(:page, :size), - search_after: search_after, - sort: sort, - query: es_query, - aggregations: aggregations, - track_total_hits: true, - }.compact) else - __elasticsearch__.search({ - size: options.dig(:page, :size), - from: from, - sort: sort, - query: es_query, - aggregations: aggregations, - track_total_hits: true, - }.compact) + __elasticsearch__.search( + { + size: options.dig(:page, :size), + from: from, + sort: sort, + query: es_query, + aggregations: aggregations, + track_total_hits: true, + }.compact, + ) end end @@ -535,8 +805,11 @@ def delete_alias(options = {}) client.indices.delete_alias index: index_name, name: alias_name "Deleted alias #{alias_name} for index #{index_name}." 
end - if client.indices.exists_alias?(name: alias_name, index: [alternate_index_name]) - client.indices.delete_alias index: alternate_index_name, name: alias_name + if client.indices.exists_alias?( + name: alias_name, index: [alternate_index_name], + ) + client.indices.delete_alias index: alternate_index_name, + name: alias_name "Deleted alias #{alias_name} for index #{alternate_index_name}." end # end @@ -550,12 +823,13 @@ def create_index(options = {}) client = Elasticsearch::Model.client # delete index if it has the same name as the alias - __elasticsearch__.delete_index!(index: alias_name) if __elasticsearch__.index_exists?(index: alias_name) && !client.indices.exists_alias?(name: alias_name) - - if name == "DataciteDoi" || name == "OtherDoi" - create_template + if __elasticsearch__.index_exists?(index: alias_name) && + !client.indices.exists_alias?(name: alias_name) + __elasticsearch__.delete_index!(index: alias_name) end + create_template if name == "DataciteDoi" || name == "OtherDoi" + # indexes in DOI model are aliased from DataciteDoi and OtherDoi models # TODO switch to DataciteDoi index # if self.name == "Doi" @@ -571,8 +845,12 @@ def create_index(options = {}) # "Created indexes #{datacite_index_name}, #{other_index_name}, #{datacite_alternate_index_name}, and #{other_alternate_index_name}." # else - __elasticsearch__.create_index!(index: index_name) unless __elasticsearch__.index_exists?(index: index_name) - __elasticsearch__.create_index!(index: alternate_index_name) unless __elasticsearch__.index_exists?(index: alternate_index_name) + unless __elasticsearch__.index_exists?(index: index_name) + __elasticsearch__.create_index!(index: index_name) + end + unless __elasticsearch__.index_exists?(index: alternate_index_name) + __elasticsearch__.create_index!(index: alternate_index_name) + end "Created indexes #{index_name} and #{alternate_index_name}." 
# end @@ -606,8 +884,12 @@ def delete_index(options = {}) # "Deleted indexes #{datacite_index_name}, #{other_index_name}, #{datacite_alternate_index_name}, and #{other_alternate_index_name}." # else - __elasticsearch__.delete_index!(index: index_name) if __elasticsearch__.index_exists?(index: index_name) - __elasticsearch__.delete_index!(index: alternate_index_name) if __elasticsearch__.index_exists?(index: alternate_index_name) + if __elasticsearch__.index_exists?(index: index_name) + __elasticsearch__.delete_index!(index: index_name) + end + if __elasticsearch__.index_exists?(index: alternate_index_name) + __elasticsearch__.delete_index!(index: alternate_index_name) + end "Deleted indexes #{index_name} and #{alternate_index_name}." # end @@ -660,21 +942,27 @@ def index_stats(options = {}) # "inactive index #{inactive_index} has #{inactive_index_count} documents, " \ # "database has #{database_count} documents." # else - stats = client.indices.stats index: [active_index, inactive_index], docs: true - active_index_count = stats.dig("indices", active_index, "primaries", "docs", "count") - inactive_index_count = stats.dig("indices", inactive_index, "primaries", "docs", "count") + stats = + client.indices.stats index: [active_index, inactive_index], docs: true + active_index_count = + stats.dig("indices", active_index, "primaries", "docs", "count") + inactive_index_count = + stats.dig("indices", inactive_index, "primaries", "docs", "count") # workaround until STI is enabled - database_count = if name == "DataCiteDoi" - where(type: "DataCiteDoi").count - elsif name == "OtherDoi" - where(type: "OtherDoi").count - else - all.count - end + database_count = + if name == "DataCiteDoi" + where(type: "DataCiteDoi").count + elsif name == "OtherDoi" + where(type: "OtherDoi").count + else + all.count + end "Active index #{active_index} has #{active_index_count} documents, " \ - "inactive index #{inactive_index} has #{inactive_index_count} documents, " \ + "inactive index 
#{inactive_index} has #{ + inactive_index_count + } documents, " \ "database has #{database_count} documents." # end end @@ -693,17 +981,40 @@ def switch_index(options = {}) if client.indices.exists_alias?(name: alias_name, index: [index_name]) client.indices.update_aliases body: { actions: [ - { remove: { index: index_name, alias: alias_name } }, - { add: { index: alternate_index_name, alias: alias_name, is_write_index: is_write_index } }, + { + remove: { + index: index_name, + alias: alias_name, + }, + }, + { + add: { + index: alternate_index_name, + alias: alias_name, + is_write_index: is_write_index, + }, + }, ], } "Switched active index to #{alternate_index_name}." - elsif client.indices.exists_alias?(name: alias_name, index: [alternate_index_name]) + elsif client.indices.exists_alias?( + name: alias_name, index: [alternate_index_name], + ) client.indices.update_aliases body: { actions: [ - { remove: { index: alternate_index_name, alias: alias_name } }, - { add: { index: index_name, alias: alias_name } }, + { + remove: { + index: alternate_index_name, + alias: alias_name, + }, + }, + { + add: { + index: index_name, + alias: alias_name, + }, + }, ], } @@ -825,28 +1136,35 @@ def inactive_index def create_template alias_name = index_name - body = if name == "Doi" || name == "DataciteDoi" || name == "OtherDoi" - { - index_patterns: ["dois*"], - settings: Doi.settings.to_hash, - mappings: Doi.mappings.to_hash, - } - else - { - index_patterns: ["#{alias_name}*"], - settings: settings.to_hash, - mappings: mappings.to_hash, - } - end + body = + if name == "Doi" || name == "DataciteDoi" || name == "OtherDoi" + { + index_patterns: %w[dois*], + settings: Doi.settings.to_hash, + mappings: Doi.mappings.to_hash, + } + else + { + index_patterns: ["#{alias_name}*"], + settings: settings.to_hash, + mappings: mappings.to_hash, + } + end client = Elasticsearch::Model.client exists = client.indices.exists_template?(name: alias_name) response = client.indices.put_template(name: 
alias_name, body: body) if response.to_h["acknowledged"] - exists ? "Updated template #{alias_name}." : "Created template #{alias_name}." + if exists + "Updated template #{alias_name}." + else + "Created template #{alias_name}." + end + elsif exists + "An error occured updating template #{alias_name}." else - exists ? "An error occured updating template #{alias_name}." : "An error occured creating template #{alias_name}." + "An error occured creating template #{alias_name}." end end @@ -881,7 +1199,8 @@ def delete_by_query(options = {}) return "ENV['QUERY'] is required" if options[:query].blank? client = Elasticsearch::Model.client - response = client.delete_by_query(index: options[:index], q: options[:query]) + response = + client.delete_by_query(index: options[:index], q: options[:query]) if response.to_h["deleted"] "Deleted #{response.to_h['deleted'].to_i} DOIs." @@ -891,18 +1210,19 @@ def delete_by_query(options = {}) end def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end def orcid_from_url(url) - Array(/\A(?:(http|https):\/\/)?(orcid\.org\/)?(.+)/.match(url)).last + Array(%r{\A(?:(http|https)://)?(orcid\.org/)?(.+)}.match(url)).last end def ror_from_url(url) - ror = Array(/\A(?:(http|https):\/\/)?(ror\.org\/)?(.+)/.match(url)).last + ror = Array(%r{\A(?:(http|https)://)?(ror\.org/)?(.+)}.match(url)).last "ror.org/#{ror}" if ror.present? 
end end diff --git a/app/models/concerns/mailable.rb b/app/models/concerns/mailable.rb index 97366b35e..f93480613 100644 --- a/app/models/concerns/mailable.rb +++ b/app/models/concerns/mailable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Mailable extend ActiveSupport::Concern @@ -7,10 +9,10 @@ module Mailable included do def send_welcome_email(responsible_id: nil) - if self.class.name == "Provider" + if instance_of?(Provider) client_id = nil provider_id = symbol.downcase - elsif self.class.name == "Client" + elsif instance_of?(Client) client_id = symbol.downcase provider_id = provider_id end @@ -23,21 +25,58 @@ def send_welcome_email(responsible_id: nil) "provider_id" => provider_id, }.compact - jwt = encode_token(payload.merge(iat: Time.now.to_i, exp: Time.now.to_i + 3600 * 24, aud: Rails.env)) + jwt = + encode_token( + payload.merge( + iat: Time.now.to_i, exp: Time.now.to_i + 3_600 * 24, aud: Rails.env, + ), + ) url = ENV["BRACCO_URL"] + "?jwt=" + jwt reset_url = ENV["BRACCO_URL"] + "/reset" - if Rails.env.stage? - title = ENV["ES_PREFIX"].present? ? "DataCite Fabrica Stage" : "DataCite Fabrica Test" + title = if Rails.env.stage? + if ENV["ES_PREFIX"].present? + "DataCite Fabrica Stage" + else + "DataCite Fabrica Test" + end else - title = "DataCite Fabrica" + "DataCite Fabrica" end subject = "#{title}: New Account" - account_type = self.class.name == "Provider" ? 
member_type.humanize : client_type.humanize + account_type = + if instance_of?(Provider) + member_type.humanize + else + client_type.humanize + end responsible_id = (responsible_id || "admin").upcase - text = User.format_message_text(template: "users/welcome.text.erb", title: title, contact_name: name, name: symbol, url: url, reset_url: reset_url) - html = User.format_message_html(template: "users/welcome.html.erb", title: title, contact_name: name, name: symbol, url: url, reset_url: reset_url) + text = + User.format_message_text( + template: "users/welcome.text.erb", + title: title, + contact_name: name, + name: symbol, + url: url, + reset_url: reset_url, + ) + html = + User.format_message_html( + template: "users/welcome.html.erb", + title: title, + contact_name: name, + name: symbol, + url: url, + reset_url: reset_url, + ) - response = User.send_email_message(name: name, email: system_email, subject: subject, text: text, html: html) + response = + User.send_email_message( + name: name, + email: system_email, + subject: subject, + text: text, + html: html, + ) fields = [ { title: "Account ID", value: symbol, short: true }, @@ -46,7 +85,10 @@ def send_welcome_email(responsible_id: nil) { title: "System email", value: system_email, short: true }, { title: "Responsible Account ID", value: responsible_id }, ] - User.send_notification_to_slack(nil, title: subject, level: "good", fields: fields) + User.send_notification_to_slack( + nil, + title: subject, level: "good", fields: fields, + ) response end @@ -54,12 +96,36 @@ def send_welcome_email(responsible_id: nil) def send_delete_email(responsible_id: nil) title = Rails.env.stage? ? "DataCite Fabrica Test" : "DataCite Fabrica" subject = "#{title}: Account Deleted" - account_type = self.class.name == "Provider" ? 
member_type.humanize : client_type.humanize + account_type = + if instance_of?(Provider) + member_type.humanize + else + client_type.humanize + end responsible_id ||= "ADMIN" - text = User.format_message_text(template: "users/delete.text.erb", title: title, contact_name: name, name: symbol) - html = User.format_message_html(template: "users/delete.html.erb", title: title, contact_name: name, name: symbol) + text = + User.format_message_text( + template: "users/delete.text.erb", + title: title, + contact_name: name, + name: symbol, + ) + html = + User.format_message_html( + template: "users/delete.html.erb", + title: title, + contact_name: name, + name: symbol, + ) - response = User.send_email_message(name: name, email: system_email, subject: subject, text: text, html: html) + response = + User.send_email_message( + name: name, + email: system_email, + subject: subject, + text: text, + html: html, + ) fields = [ { title: "Account ID", value: symbol, short: true }, @@ -68,7 +134,10 @@ def send_delete_email(responsible_id: nil) { title: "System email", value: system_email, short: true }, { title: "Responsible Account ID", value: responsible_id }, ] - User.send_notification_to_slack(nil, title: subject, level: "warning", fields: fields) + User.send_notification_to_slack( + nil, + title: subject, level: "warning", fields: fields, + ) response end @@ -76,7 +145,9 @@ def send_delete_email(responsible_id: nil) module ClassMethods # icon for Slack messages - SLACK_ICON_URL = "https://github.com/datacite/segugio/blob/master/source/images/fabrica.png".freeze + SLACK_ICON_URL = + "https://github.com/datacite/segugio/blob/master/source/images/fabrica.png" + class NoOpHTTPClient def self.post(_uri, params = {}) @@ -85,26 +156,59 @@ def self.post(_uri, params = {}) end end - def format_message_text(template: nil, title: nil, contact_name: nil, name: nil, url: nil, reset_url: nil) + def format_message_text( + template: nil, + title: nil, + contact_name: nil, + name: nil, + url: 
nil, + reset_url: nil + ) ActionController::Base.render( - assigns: { title: title, contact_name: contact_name, name: name, url: url, reset_url: reset_url }, + assigns: { + title: title, + contact_name: contact_name, + name: name, + url: url, + reset_url: reset_url, + }, template: template, layout: false, ) end - def format_message_html(template: nil, title: nil, contact_name: nil, name: nil, url: nil, reset_url: nil) - input = ActionController::Base.render( - assigns: { title: title, contact_name: contact_name, name: name, url: url, reset_url: reset_url }, - template: template, - layout: "application", - ) + def format_message_html( + template: nil, + title: nil, + contact_name: nil, + name: nil, + url: nil, + reset_url: nil + ) + input = + ActionController::Base.render( + assigns: { + title: title, + contact_name: contact_name, + name: name, + url: url, + reset_url: reset_url, + }, + template: template, + layout: "application", + ) - premailer = Premailer.new(input, with_html_string: true, warn_level: Premailer::Warnings::SAFE) + premailer = + Premailer.new( + input, + with_html_string: true, warn_level: Premailer::Warnings::SAFE, + ) premailer.to_inline_css end - def send_email_message(name: nil, email: nil, subject: nil, text: nil, html: nil) + def send_email_message( + name: nil, email: nil, subject: nil, text: nil, html: nil + ) mg_client = Mailgun::Client.new ENV["MAILGUN_API_KEY"] mg_client.enable_test_mode! if Rails.env.test? mb_obj = Mailgun::MessageBuilder.new @@ -132,17 +236,16 @@ def send_notification_to_slack(text, options = {}) }.compact # don't send message to Slack API in test and development environments - notifier = if Rails.env.test? || Rails.env.development? - Slack::Notifier.new ENV["SLACK_WEBHOOK_URL"], - username: "Fabrica", - icon_url: SLACK_ICON_URL do - http_client NoOpHTTPClient - end - else - Slack::Notifier.new ENV["SLACK_WEBHOOK_URL"], - username: "Fabrica", - icon_url: SLACK_ICON_URL - end + notifier = + if Rails.env.test? 
|| Rails.env.development? + Slack::Notifier.new ENV["SLACK_WEBHOOK_URL"], + username: "Fabrica", icon_url: SLACK_ICON_URL do + http_client NoOpHTTPClient + end + else + Slack::Notifier.new ENV["SLACK_WEBHOOK_URL"], + username: "Fabrica", icon_url: SLACK_ICON_URL + end response = notifier.ping attachments: [attachment] response.first.body diff --git a/app/models/concerns/metadatable.rb b/app/models/concerns/metadatable.rb index 092728cbe..5b62aca91 100644 --- a/app/models/concerns/metadatable.rb +++ b/app/models/concerns/metadatable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Metadatable extend ActiveSupport::Concern @@ -7,7 +9,7 @@ def get_doi_ra(doi, options = {}) options[:timeout] ||= 120 doi = CGI.unescape(clean_doi(doi)) - prefix_string = Array(/^(10\.\d{4,5})\/.+/.match(doi)).last + prefix_string = Array(%r{^(10\.\d{4,5})/.+}.match(doi)).last return {} if prefix_string.blank? # return registration agency cached in Redis if it exists and not test @@ -19,7 +21,9 @@ def get_doi_ra(doi, options = {}) url = "http://doi.crossref.org/doiRA/#{doi}" response = Maremma.get(url, options.merge(host: true)) - response["errors"] = [{ "status" => 400, "title" => response["data"] }] if response["data"].is_a?(String) + if response["data"].is_a?(String) + response["errors"] = [{ "status" => 400, "title" => response["data"] }] + end return response["errors"] if response["errors"].present? 
ra = response.fetch("data", [{}]).first.fetch("RA", nil) @@ -30,7 +34,8 @@ def get_doi_ra(doi, options = {}) redis.set prefix_string, ra unless options[:test] ra else - error = response.fetch("data", [{}]).first.fetch("status", "An error occured") + error = + response.fetch("data", [{}]).first.fetch("status", "An error occured") { "errors" => [{ "title" => error, "status" => 400 }] } end end diff --git a/app/models/concerns/modelable.rb b/app/models/concerns/modelable.rb index 6b77e5fb6..8cc773432 100644 --- a/app/models/concerns/modelable.rb +++ b/app/models/concerns/modelable.rb @@ -1,11 +1,14 @@ +# frozen_string_literal: true + module Modelable extend ActiveSupport::Concern module ClassMethods def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end @@ -16,9 +19,9 @@ def orcid_as_url(orcid) end def orcid_from_url(url) - if /\A(?:(http|https):\/\/(orcid.org)\/)(.+)\z/.match?(url) + if %r{\A(?:(http|https)://(orcid.org)/)(.+)\z}.match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").upcase + uri.path.gsub(%r{^/}, "").upcase end end end diff --git a/app/models/concerns/passwordable.rb b/app/models/concerns/passwordable.rb index 1f3afada2..e1d0b37a4 100644 --- a/app/models/concerns/passwordable.rb +++ b/app/models/concerns/passwordable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Passwordable extend ActiveSupport::Concern @@ -6,9 +8,13 @@ module Passwordable included do # "yes", "not set" (used in serializer) and a blank value are not allowed for new password def encrypt_password_sha256(password) - return nil unless ENV["SESSION_ENCRYPTED_COOKIE_SALT"].present? && password.present? 
+ unless ENV["SESSION_ENCRYPTED_COOKIE_SALT"].present? && password.present? + return nil + end - Digest::SHA256.hexdigest password.to_s + "{" + ENV["SESSION_ENCRYPTED_COOKIE_SALT"] + "}" + Digest::SHA256.hexdigest password.to_s + "{" + + ENV["SESSION_ENCRYPTED_COOKIE_SALT"] + + "}" end def authenticate_sha256(unencrypted_password) diff --git a/app/models/concerns/processable.rb b/app/models/concerns/processable.rb index 58b5415c0..08dec49ab 100644 --- a/app/models/concerns/processable.rb +++ b/app/models/concerns/processable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Processable extend ActiveSupport::Concern diff --git a/app/models/concerns/searchable.rb b/app/models/concerns/searchable.rb index 304333f90..7feee7dea 100644 --- a/app/models/concerns/searchable.rb +++ b/app/models/concerns/searchable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Searchable extend ActiveSupport::Concern extend ActiveModel::Naming @@ -23,9 +25,7 @@ def get_data(options = {}) end def parse_items(items, options = {}) - Array(items).map do |item| - parse_item(item, options) - end + Array(items).map { |item| parse_item(item, options) } end def parse_item(item, options = {}) diff --git a/app/models/concerns/userable.rb b/app/models/concerns/userable.rb index 3a7ab8e81..e9226d2e1 100644 --- a/app/models/concerns/userable.rb +++ b/app/models/concerns/userable.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module Userable extend ActiveSupport::Concern @@ -6,10 +8,18 @@ def remove_users(id: nil, jwt: nil) result = Maremma.get user_url Array.wrap(result.body["data"]).each do |user| url = ENV["VOLPINO_URL"] + "/users/" + user.fetch("id") - data = { "data" => { "attributes" => { id => nil }, - "type" => "users" } } + data = { + "data" => { "attributes" => { id => nil }, "type" => "users" }, + } - result = Maremma.patch(url, content_type: "application/vnd.api+json", accept: "application/vnd.api+json", bearer: jwt, data: data.to_json) + result = + Maremma.patch( + 
url, + content_type: "application/vnd.api+json", + accept: "application/vnd.api+json", + bearer: jwt, + data: data.to_json, + ) logger.info result.inspect end end diff --git a/app/models/concerns/wikidatable.rb b/app/models/concerns/wikidatable.rb index e8cbd16b9..813bcbbe6 100644 --- a/app/models/concerns/wikidatable.rb +++ b/app/models/concerns/wikidatable.rb @@ -18,7 +18,10 @@ def find_by_wikidata_id(wikidata_id) def fetch_wikidata_by_id(wikidata_id) return {} if wikidata_id.blank? - url = "https://www.wikidata.org/w/api.php?action=wbgetentities&ids=#{wikidata_id}&languages=en&props=labels|descriptions|claims&format=json" + url = + "https://www.wikidata.org/w/api.php?action=wbgetentities&ids=#{ + wikidata_id + }&languages=en&props=labels|descriptions|claims&format=json" response = Maremma.get(url, host: true) @@ -32,12 +35,23 @@ def parse_wikidata_message(id: nil, message: nil) claims = message.dig("entities", id, "claims") || {} twitter = claims.dig("P2002", 0, "mainsnak", "datavalue", "value") - inception = claims.dig("P571", 0, "mainsnak", "datavalue", "value", "time") + inception = + claims.dig("P571", 0, "mainsnak", "datavalue", "value", "time") # extract year, e.g. +1961-00-00 to 1961 inception_year = inception[1..4] if inception.present? - geolocation = claims.dig("P625", 0, "mainsnak", "datavalue", "value") || + geolocation = + claims.dig("P625", 0, "mainsnak", "datavalue", "value") || claims.dig("P625", 0, "datavalue", "value") || - claims.dig("P159", 0, "qualifiers", "P625", 0, "datavalue", "value") || {} + claims.dig( + "P159", + 0, + "qualifiers", + "P625", + 0, + "datavalue", + "value", + ) || + {} ringgold = claims.dig("P3500", 0, "mainsnak", "datavalue", "value") Hashie::Mash.new( @@ -54,21 +68,24 @@ def parse_wikidata_message(id: nil, message: nil) def wikidata_query(employment) return [] if employment.blank? 
- ringgold_filter = Array.wrap(employment).reduce([]) do |sum, f| - sum << f["ringgold"] if f["ringgold"] + ringgold_filter = + Array.wrap(employment).reduce([]) do |sum, f| + sum << f["ringgold"] if f["ringgold"] - sum - end.join('", "') + sum + end.join("\", \"") - grid_filter = Array.wrap(employment).reduce([]) do |sum, f| - sum << f["grid"] if f["grid"] + grid_filter = + Array.wrap(employment).reduce([]) do |sum, f| + sum << f["grid"] if f["grid"] - sum - end.join('", "') + sum + end.join("\", \"") - user_agent = "Mozilla/5.0 (compatible; Maremma/4.7.1; mailto:info@datacite.org)" + user_agent = + "Mozilla/5.0 (compatible; Maremma/4.7.1; mailto:info@datacite.org)" endpoint = "https://query.wikidata.org/sparql" - sparql = <<"SPARQL".chop + sparql = <<"SPARQL". PREFIX wikibase: PREFIX wd: PREFIX wdt: @@ -81,27 +98,40 @@ def wikidata_query(employment) OPTIONAL { ?item wdt:P2427 ?grid. } OPTIONAL { ?item wdt:P3500 ?ringgold. } - FILTER(?ringgold in ("#{ringgold_filter}") || ?grid in ("#{grid_filter}")). + FILTER(?ringgold in ("#{ + ringgold_filter + }") || ?grid in ("#{ + grid_filter + }")). SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE]" . 
} } SPARQL + chop - client = SPARQL::Client.new(endpoint, - method: :get, - headers: { "User-Agent" => user_agent }) + client = + SPARQL::Client.new( + endpoint, + method: :get, headers: { "User-Agent" => user_agent }, + ) response = client.query(sparql) - ringgold_to_ror = Array.wrap(response).reduce({}) do |sum, r| - sum[r[:ringgold].to_s] = "https://ror.org/" + r[:ror] if r[:ror] && r[:ringgold] - sum - end + ringgold_to_ror = + Array.wrap(response).reduce({}) do |sum, r| + if r[:ror] && r[:ringgold] + sum[r[:ringgold].to_s] = "https://ror.org/" + r[:ror] + end + sum + end - grid_to_ror = Array.wrap(response).reduce({}) do |sum, r| - sum[r[:grid].to_s] = "https://ror.org/" + r[:ror] if r[:ror] && r[:grid] - sum - end + grid_to_ror = + Array.wrap(response).reduce({}) do |sum, r| + if r[:ror] && r[:grid] + sum[r[:grid].to_s] = "https://ror.org/" + r[:ror] + end + sum + end Array.wrap(employment).reduce([]) do |sum, e| if ringgold_to_ror[e["ringgold"]] diff --git a/app/models/data_catalog.rb b/app/models/data_catalog.rb index e513c0cd6..946b93cee 100644 --- a/app/models/data_catalog.rb +++ b/app/models/data_catalog.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DataCatalog # include helper module for PORO models include Modelable @@ -9,7 +11,9 @@ def self.find_by_id(id) url = "https://api.datacite.org/re3data/#{doi}" response = Maremma.get(url, host: true) - return {} if response.status != 200 || response.body.dig("data", "id") != doi.upcase + if response.status != 200 || response.body.dig("data", "id") != doi.upcase + return {} + end message = response.body.dig("data", "attributes") data = [parse_message(id: id, message: message)] @@ -41,17 +45,16 @@ def self.query(query, options = {}) return [] if response.status != 200 - data = Array.wrap(response.body.fetch("data", nil)).map do |message| - parse_message(id: doi_as_url(message["id"]), message: message["attributes"]) - end + data = + Array.wrap(response.body.fetch("data", nil)).map do |message| + 
parse_message( + id: doi_as_url(message["id"]), message: message["attributes"], + ) + end meta = { "total" => response.body.dig("meta", "total") } errors = response.body.fetch("errors", nil) - { - data: data, - meta: meta, - errors: errors, - } + { data: data, meta: meta, errors: errors } end def self.parse_message(id: nil, message: nil) diff --git a/app/models/datacite_doi.rb b/app/models/datacite_doi.rb index 223975bd8..a7551fc2d 100644 --- a/app/models/datacite_doi.rb +++ b/app/models/datacite_doi.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DataciteDoi < Doi include Elasticsearch::Model @@ -20,13 +22,25 @@ class DataciteDoi < Doi # TODO remove query for type once STI is enabled def self.import_by_ids(options = {}) - from_id = (options[:from_id] || DataciteDoi.where(type: "DataciteDoi").minimum(:id)).to_i - until_id = (options[:until_id] || DataciteDoi.where(type: "DataciteDoi").maximum(:id)).to_i + from_id = + (options[:from_id] || DataciteDoi.where(type: "DataciteDoi").minimum(:id)). + to_i + until_id = + ( + options[:until_id] || + DataciteDoi.where(type: "DataciteDoi").maximum(:id) + ). + to_i # get every id between from_id and end_id (from_id..until_id).step(500).each do |id| DataciteDoiImportByIdJob.perform_later(options.merge(id: id)) - Rails.logger.info "Queued importing for DataCite DOIs with IDs starting with #{id}." unless Rails.env.test? + unless Rails.env.test? + Rails. + logger.info "Queued importing for DataCite DOIs with IDs starting with #{ + id + }." + end end (from_id..until_id).to_a.length @@ -36,25 +50,38 @@ def self.import_by_id(options = {}) return nil if options[:id].blank? id = options[:id].to_i - index = if Rails.env.test? - index_name - elsif options[:index].present? - options[:index] - else - inactive_index - end + index = + if Rails.env.test? + index_name + elsif options[:index].present? 
+ options[:index] + else + inactive_index + end errors = 0 count = 0 # TODO remove query for type once STI is enabled - DataciteDoi.where(type: "DataciteDoi").where(id: id..(id + 499)).find_in_batches(batch_size: 500) do |dois| - response = DataciteDoi.__elasticsearch__.client.bulk \ - index: index, - type: DataciteDoi.document_type, - body: dois.map { |doi| { index: { _id: doi.id, data: doi.as_indexed_json } } } + DataciteDoi.where(type: "DataciteDoi").where(id: id..(id + 499)). + find_in_batches(batch_size: 500) do |dois| + response = + DataciteDoi.__elasticsearch__.client.bulk index: index, + type: + DataciteDoi.document_type, + body: + dois.map { |doi| + { + index: { + _id: doi.id, + data: + doi.as_indexed_json, + }, + } + } # try to handle errors - errors_in_response = response["items"].select { |k, _v| k.values.first["error"].present? } + errors_in_response = + response["items"].select { |k, _v| k.values.first["error"].present? } errors += errors_in_response.length errors_in_response.each do |item| Rails.logger.error "[Elasticsearch] " + item.inspect @@ -66,24 +93,35 @@ def self.import_by_id(options = {}) end if errors > 1 - Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} DataCite DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[Elasticsearch] #{errors} errors importing #{ + count + } DataCite DOIs with IDs #{id} - #{id + 499}." elsif count > 0 - Rails.logger.info "[Elasticsearch] Imported #{count} DataCite DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{ + count + } DataCite DOIs with IDs #{id} - #{id + 499}." end count - rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.info "[Elasticsearch] Error #{e.message} importing DataCite DOIs with IDs #{id} - #{(id + 499)}." 
+ rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, + Faraday::ConnectionFailed, + ActiveRecord::LockWaitTimeout => e + Rails.logger.info "[Elasticsearch] Error #{ + e.message + } importing DataCite DOIs with IDs #{id} - #{id + 499}." count = 0 # TODO remove query for type once STI is enabled - DataciteDoi.where(type: "DataciteDoi").where(id: id..(id + 499)).find_each do |doi| + DataciteDoi.where(type: "DataciteDoi").where(id: id..(id + 499)). + find_each do |doi| IndexJob.perform_later(doi) count += 1 end - Rails.logger.info "[Elasticsearch] Imported #{count} DataCite DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{ + count + } DataCite DOIs with IDs #{id} - #{id + 499}." count end diff --git a/app/models/doi.rb b/app/models/doi.rb index 75a8bdbd4..4e7f39712 100644 --- a/app/models/doi.rb +++ b/app/models/doi.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "maremma" require "benchmark" @@ -100,8 +102,8 @@ class Doi < ApplicationRecord validates_presence_of :url, if: Proc.new { |doi| doi.is_registered_or_findable? } # from https://www.crossref.org/blog/dois-and-matching-regular-expressions/ but using uppercase - validates_format_of :doi, with: /\A10\.\d{4,5}\/[-\._;()\/:a-zA-Z0-9\*~\$\=]+\z/, on: :create - validates_format_of :url, with: /\A(ftp|http|https):\/\/[\S]+/, if: :url?, message: "URL is not valid" + validates_format_of :doi, with: /\A10\.\d{4,5}\/[-._;()\/:a-zA-Z0-9*~$=]+\z/, on: :create + validates_format_of :url, with: /\A(ftp|http|https):\/\/\S+/, if: :url?, message: "URL is not valid" validates_uniqueness_of :doi, message: "This DOI has already been taken", unless: :only_validate validates_inclusion_of :agency, in: %w(datacite crossref kisti medra istic jalc airiti cnki op), allow_blank: true validates :last_landing_page_status, numericality: { only_integer: true }, if: :last_landing_page_status? 
@@ -939,16 +941,16 @@ def self.query(query, options = {}) options[:page][:size] ||= 25 aggregations = if options[:totals_agg] == "provider" - provider_aggregations - elsif options[:totals_agg] == "client" - client_aggregations - elsif options[:totals_agg] == "client_export" - client_export_aggregations - elsif options[:totals_agg] == "prefix" - prefix_aggregations - else - query_aggregations - end + provider_aggregations + elsif options[:totals_agg] == "client" + client_aggregations + elsif options[:totals_agg] == "client_export" + client_export_aggregations + elsif options[:totals_agg] == "prefix" + prefix_aggregations + else + query_aggregations + end # Cursor nav uses search_after, this should always be an array of values that match the sort. if options.fetch(:page, {}).key?(:cursor) @@ -1246,9 +1248,9 @@ def self.import_by_client(client_id: nil) return nil if client.blank? index = if Rails.env.test? - "dois-test" - else - active_index + "dois-test" + else + active_index end errors = 0 count = 0 @@ -1288,12 +1290,12 @@ def self.index_by_id(options = {}) id = options[:id].to_i index = if Rails.env.test? - "dois-test" - elsif options[:index].present? - options[:index] - else - inactive_index - end + "dois-test" + elsif options[:index].present? + options[:index] + else + inactive_index + end errors = 0 count = 0 @@ -1313,14 +1315,14 @@ def self.index_by_id(options = {}) end if errors > 1 - Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} DOIs with IDs #{id} - #{id + 499}." elsif count > 0 - Rails.logger.info "[Elasticsearch] Imported #{count} DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} DOIs with IDs #{id} - #{id + 499}." 
end count rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.info "[Elasticsearch] Error #{e.message} importing DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Error #{e.message} importing DOIs with IDs #{id} - #{id + 499}." count = 0 @@ -1329,7 +1331,7 @@ def self.index_by_id(options = {}) count += 1 end - Rails.logger.info "[Elasticsearch] Imported #{count} DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} DOIs with IDs #{id} - #{id + 499}." count end @@ -1573,11 +1575,11 @@ def self.convert_affiliation_by_id(options = {}) end end - Rails.logger.info "[MySQL] Converted affiliations for #{count} DOIs with IDs #{id} - #{(id + 499)}." if count > 0 + Rails.logger.info "[MySQL] Converted affiliations for #{count} DOIs with IDs #{id} - #{id + 499}." if count > 0 count rescue TypeError, ActiveRecord::ActiveRecordError, ActiveRecord::LockWaitTimeout => e - Rails.logger.error "[MySQL] Error converting affiliations for DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[MySQL] Error converting affiliations for DOIs with IDs #{id} - #{id + 499}." count end @@ -1628,11 +1630,11 @@ def self.convert_container_by_id(options = {}) end end - Rails.logger.info "[MySQL] Converted containers for #{count} DOIs with IDs #{id} - #{(id + 499)}." if count > 0 + Rails.logger.info "[MySQL] Converted containers for #{count} DOIs with IDs #{id} - #{id + 499}." if count > 0 count rescue TypeError, ActiveRecord::ActiveRecordError, ActiveRecord::LockWaitTimeout => e - Rails.logger.error "[MySQL] Error converting containers for DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[MySQL] Error converting containers for DOIs with IDs #{id} - #{id + 499}." 
count end @@ -2163,8 +2165,8 @@ def update_language lang = language.to_s.split("-").first entry = ISO_639.find_by(code: lang) || ISO_639.find_by(english_name: lang.upcase_first) self.language = if entry.present? && entry.alpha2.present? - entry.alpha2 - end + entry.alpha2 + end end def update_field_of_science @@ -2248,56 +2250,54 @@ def self.migrate_landing_page(_options = {}) # Handle camel casing first. Doi.where.not("last_landing_page_status_result" => nil).find_each do |doi| - begin - # First we try and fix into camel casing - result = doi.last_landing_page_status_result - mappings = { - "body-has-pid" => "bodyHasPid", - "dc-identifier" => "dcIdentifier", - "citation-doi" => "citationDoi", - "redirect-urls" => "redirectUrls", - "schema-org-id" => "schemaOrgId", - "has-schema-org" => "hasSchemaOrg", - "redirect-count" => "redirectCount", - "download-latency" => "downloadLatency", - } - result = result.map { |k, v| [mappings[k] || k, v] }.to_h - # doi.update_columns("last_landing_page_status_result": result) - - # Do a fix of the stored download Latency - # Sometimes was floating point precision, we dont need this - download_latency = result["downloadLatency"] - download_latency = download_latency.nil? ? download_latency : download_latency.round - - # Try to put the checked date into ISO8601 - # If we dont have one (there was legacy reasons) then set to unix epoch - checked = doi.last_landing_page_status_check - checked = checked.nil? ? Time.at(0) : checked - checked = checked.iso8601 - - # Next we want to build a new landing_page result. 
- landing_page = { - "checked" => checked, - "status" => doi.last_landing_page_status, - "url" => doi.last_landing_page, - "contentType" => doi.last_landing_page_content_type, - "error" => result["error"], - "redirectCount" => result["redirectCount"], - "redirectUrls" => result["redirectUrls"], - "downloadLatency" => download_latency, - "hasSchemaOrg" => result["hasSchemaOrg"], - "schemaOrgId" => result["schemaOrgId"], - "dcIdentifier" => result["dcIdentifier"], - "citationDoi" => result["citationDoi"], - "bodyHasPid" => result["bodyHasPid"], - } - - doi.update_columns("landing_page": landing_page) - - Rails.logger.info "Updated " + doi.doi - rescue TypeError, NoMethodError => e - Rails.logger.error "Error updating landing page " + doi.doi + ": " + e.message - end + # First we try and fix into camel casing + result = doi.last_landing_page_status_result + mappings = { + "body-has-pid" => "bodyHasPid", + "dc-identifier" => "dcIdentifier", + "citation-doi" => "citationDoi", + "redirect-urls" => "redirectUrls", + "schema-org-id" => "schemaOrgId", + "has-schema-org" => "hasSchemaOrg", + "redirect-count" => "redirectCount", + "download-latency" => "downloadLatency", + } + result = result.transform_keys { |k| mappings[k] || k } + # doi.update_columns("last_landing_page_status_result": result) + + # Do a fix of the stored download Latency + # Sometimes was floating point precision, we dont need this + download_latency = result["downloadLatency"] + download_latency = download_latency.nil? ? download_latency : download_latency.round + + # Try to put the checked date into ISO8601 + # If we dont have one (there was legacy reasons) then set to unix epoch + checked = doi.last_landing_page_status_check + checked = checked.nil? ? Time.at(0) : checked + checked = checked.iso8601 + + # Next we want to build a new landing_page result. 
+ landing_page = { + "checked" => checked, + "status" => doi.last_landing_page_status, + "url" => doi.last_landing_page, + "contentType" => doi.last_landing_page_content_type, + "error" => result["error"], + "redirectCount" => result["redirectCount"], + "redirectUrls" => result["redirectUrls"], + "downloadLatency" => download_latency, + "hasSchemaOrg" => result["hasSchemaOrg"], + "schemaOrgId" => result["schemaOrgId"], + "dcIdentifier" => result["dcIdentifier"], + "citationDoi" => result["citationDoi"], + "bodyHasPid" => result["bodyHasPid"], + } + + doi.update_columns("landing_page": landing_page) + + Rails.logger.info "Updated " + doi.doi + rescue TypeError, NoMethodError => e + Rails.logger.error "Error updating landing page " + doi.doi + ": " + e.message end "Finished migrating landing pages." @@ -2315,38 +2315,36 @@ def self.add_index_type(options = {}) Rails.logger.info "[migration_index_types] adding type information for DOIs with IDs #{from_id} - #{until_id}." Doi.where(id: from_id..until_id).where("type" => nil).find_each(batch_size: 500) do |doi| - begin - agency = doi.agency - - type = if agency.blank? || agency.casecmp?("datacite") - "DataciteDoi" - elsif agency.casecmp?("crossref") - "OtherDoi" - elsif agency.casecmp?("kisti") - "OtherDoi" - elsif agency.casecmp?("medra") - "OtherDoi" - elsif agency.casecmp?("istic") - "OtherDoi" - elsif agency.casecmp?("jalc") - "OtherDoi" - elsif agency.casecmp?("airiti") - "OtherDoi" - elsif agency.casecmp?("cnki") - "OtherDoi" - elsif agency.casecmp?("op") - "OtherDoi" - else - "DataciteDoi" - end - - doi.update_columns("type" => type) - - count += 1 - Rails.logger.info "Updated #{doi.doi} (#{doi.id})" - rescue StandardError => e - Rails.logger.error "Error updating #{doi.doi} (#{doi.id}), #{e.message}" + agency = doi.agency + + type = if agency.blank? 
|| agency.casecmp?("datacite") + "DataciteDoi" + elsif agency.casecmp?("crossref") + "OtherDoi" + elsif agency.casecmp?("kisti") + "OtherDoi" + elsif agency.casecmp?("medra") + "OtherDoi" + elsif agency.casecmp?("istic") + "OtherDoi" + elsif agency.casecmp?("jalc") + "OtherDoi" + elsif agency.casecmp?("airiti") + "OtherDoi" + elsif agency.casecmp?("cnki") + "OtherDoi" + elsif agency.casecmp?("op") + "OtherDoi" + else + "DataciteDoi" end + + doi.update_columns("type" => type) + + count += 1 + Rails.logger.info "Updated #{doi.doi} (#{doi.id})" + rescue StandardError => e + Rails.logger.error "Error updating #{doi.doi} (#{doi.id}), #{e.message}" end "Finished updating dois, total #{count}" diff --git a/app/models/event.rb b/app/models/event.rb index 3651a319f..a1a6d0ea4 100644 --- a/app/models/event.rb +++ b/app/models/event.rb @@ -18,8 +18,18 @@ class Event < ApplicationRecord include Elasticsearch::Model - belongs_to :doi_for_source, class_name: "Doi", primary_key: :doi, foreign_key: :source_doi, touch: true, optional: true - belongs_to :doi_for_target, class_name: "Doi", primary_key: :doi, foreign_key: :target_doi, touch: true, optional: true + belongs_to :doi_for_source, + class_name: "Doi", + primary_key: :doi, + foreign_key: :source_doi, + touch: true, + optional: true + belongs_to :doi_for_target, + class_name: "Doi", + primary_key: :doi, + foreign_key: :target_doi, + touch: true, + optional: true before_validation :set_defaults before_create :set_source_and_target_doi @@ -35,15 +45,15 @@ class Event < ApplicationRecord #  Reset after failure event :reset do - transitions from: [:failed], to: :waiting + transitions from: %i[failed], to: :waiting end event :start do - transitions from: [:waiting], to: :working + transitions from: %i[waiting], to: :working end event :finish do - transitions from: [:working], to: :done + transitions from: %i[working], to: :done end event :error do @@ -66,41 +76,49 @@ class Event < ApplicationRecord alias_attribute :created, 
:created_at alias_attribute :updated, :updated_at - INCLUDED_RELATION_TYPES = [ - "cites", "is-cited-by", - "is-supplement-to", "is-supplemented-by", - "references", "is-referenced-by" + INCLUDED_RELATION_TYPES = %w[ + cites + is-cited-by + is-supplement-to + is-supplemented-by + references + is-referenced-by ].freeze # renamed to make it clearer that these relation types are grouped together as references - REFERENCE_RELATION_TYPES = [ - "is-cited-by", - "is-supplement-to", - "references", - ].freeze + REFERENCE_RELATION_TYPES = %w[is-cited-by is-supplement-to references].freeze # renamed to make it clearer that these relation types are grouped together as citations - CITATION_RELATION_TYPES = [ - "cites", - "is-supplemented-by", - "is-referenced-by", - ].freeze - - RELATIONS_RELATION_TYPES = [ - "compiles", "is-compiled-by", - "documents", "is-documented-by", - "has-metadata", "is-metadata-for", - "is-derived-from", "is-source-of", - "reviews", "is-reviewed-by", - "requires", "is-required-by", - "continues", "is-coutinued-by", - "has-version", "is-version-of", - "has-part", "is-part-of", - "is-variant-from-of", "is-original-form-of", - "is-identical-to", "obsoletes", - "is-obsolete-by", - "is-new-version-of", "is-previous-version-of", - "describes", "is-described-by" + CITATION_RELATION_TYPES = %w[cites is-supplemented-by is-referenced-by].freeze + + RELATIONS_RELATION_TYPES = %w[ + compiles + is-compiled-by + documents + is-documented-by + has-metadata + is-metadata-for + is-derived-from + is-source-of + reviews + is-reviewed-by + requires + is-required-by + continues + is-coutinued-by + has-version + is-version-of + has-part + is-part-of + is-variant-from-of + is-original-form-of + is-identical-to + obsoletes + is-obsolete-by + is-new-version-of + is-previous-version-of + describes + is-described-by ].freeze validates :subj_id, :source_id, :source_token, presence: true @@ -117,58 +135,70 @@ class Event < ApplicationRecord end mapping dynamic: "false" do - 
indexes :uuid, type: :keyword - indexes :subj_id, type: :keyword - indexes :obj_id, type: :keyword - indexes :doi, type: :keyword - indexes :orcid, type: :keyword - indexes :prefix, type: :keyword - indexes :subtype, type: :keyword - indexes :citation_type, type: :keyword - indexes :issn, type: :keyword - indexes :subj, type: :object, properties: { - type: { type: :keyword }, - id: { type: :keyword }, - uid: { type: :keyword }, - proxyIdentifiers: { type: :keyword }, - datePublished: { type: :date, format: "date_optional_time||yyyy-MM-dd||yyyy-MM||yyyy", ignore_malformed: true }, - registrantId: { type: :keyword }, - cache_key: { type: :keyword }, - } - indexes :obj, type: :object, properties: { - type: { type: :keyword }, - id: { type: :keyword }, - uid: { type: :keyword }, - proxyIdentifiers: { type: :keyword }, - datePublished: { type: :date, format: "date_optional_time||yyyy-MM-dd||yyyy-MM||yyyy", ignore_malformed: true }, - registrantId: { type: :keyword }, - cache_key: { type: :keyword }, - } - indexes :source_doi, type: :keyword - indexes :target_doi, type: :keyword + indexes :uuid, type: :keyword + indexes :subj_id, type: :keyword + indexes :obj_id, type: :keyword + indexes :doi, type: :keyword + indexes :orcid, type: :keyword + indexes :prefix, type: :keyword + indexes :subtype, type: :keyword + indexes :citation_type, type: :keyword + indexes :issn, type: :keyword + indexes :subj, + type: :object, + properties: { + type: { type: :keyword }, + id: { type: :keyword }, + uid: { type: :keyword }, + proxyIdentifiers: { type: :keyword }, + datePublished: { + type: :date, + format: "date_optional_time||yyyy-MM-dd||yyyy-MM||yyyy", + ignore_malformed: true, + }, + registrantId: { type: :keyword }, + cache_key: { type: :keyword }, + } + indexes :obj, + type: :object, + properties: { + type: { type: :keyword }, + id: { type: :keyword }, + uid: { type: :keyword }, + proxyIdentifiers: { type: :keyword }, + datePublished: { + type: :date, + format: 
"date_optional_time||yyyy-MM-dd||yyyy-MM||yyyy", + ignore_malformed: true, + }, + registrantId: { type: :keyword }, + cache_key: { type: :keyword }, + } + indexes :source_doi, type: :keyword + indexes :target_doi, type: :keyword indexes :source_relation_type_id, type: :keyword indexes :target_relation_type_id, type: :keyword - indexes :source_id, type: :keyword - indexes :source_token, type: :keyword - indexes :message_action, type: :keyword + indexes :source_id, type: :keyword + indexes :source_token, type: :keyword + indexes :message_action, type: :keyword indexes :relation_type_id, type: :keyword - indexes :registrant_id, type: :keyword - indexes :access_method, type: :keyword - indexes :metric_type, type: :keyword - indexes :total, type: :integer - indexes :license, type: :text, fields: { keyword: { type: "keyword" } } - indexes :error_messages, type: :object - indexes :callback, type: :text - indexes :aasm_state, type: :keyword - indexes :state_event, type: :keyword - indexes :year_month, type: :keyword - indexes :created_at, type: :date - indexes :updated_at, type: :date - indexes :indexed_at, type: :date - indexes :occurred_at, type: :date - indexes :citation_id, type: :keyword - indexes :citation_year, type: :integer - indexes :cache_key, type: :keyword + indexes :registrant_id, type: :keyword + indexes :access_method, type: :keyword + indexes :metric_type, type: :keyword + indexes :total, type: :integer + indexes :license, type: :text, fields: { keyword: { type: "keyword" } } + indexes :error_messages, type: :object + indexes :callback, type: :text + indexes :aasm_state, type: :keyword + indexes :state_event, type: :keyword + indexes :year_month, type: :keyword + indexes :created_at, type: :date + indexes :updated_at, type: :date + indexes :indexed_at, type: :date + indexes :occurred_at, type: :date + indexes :citation_id, type: :keyword + indexes :citation_year, type: :integer + indexes :cache_key, type: :keyword end def as_indexed_json(_options = {}) @@ 
-216,7 +246,7 @@ def citation_id end def self.query_fields - ["subj_id^10", "obj_id^10", "source_id", "relation_type_id"] + %w[subj_id^10 obj_id^10 source_id relation_type_id] end def self.query_aggregations @@ -224,53 +254,47 @@ def self.query_aggregations sources: { terms: { field: "source_id", size: 10, min_doc_count: 1 } }, prefixes: { terms: { field: "prefix", size: 10, min_doc_count: 1 } }, registrants: { - terms: { - field: "registrant_id", size: 10, min_doc_count: 1 - }, + terms: { field: "registrant_id", size: 10, min_doc_count: 1 }, aggs: { year: { date_histogram: { - field: "occurred_at", interval: "year", format: "year", order: { _key: "desc" }, min_doc_count: 1 - }, - aggs: { - bucket_truncate: { - bucket_sort: { size: 10 }, - }, + field: "occurred_at", + interval: "year", + format: "year", + order: { _key: "desc" }, + min_doc_count: 1, }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, }, }, }, citation_types: { - terms: { - field: "citation_type", size: 10, min_doc_count: 1 - }, + terms: { field: "citation_type", size: 10, min_doc_count: 1 }, aggs: { year_month: { date_histogram: { - field: "occurred_at", interval: "month", format: "yyyy-MM", order: { _key: "desc" }, min_doc_count: 1 - }, - aggs: { - bucket_truncate: { - bucket_sort: { size: 10 }, - }, + field: "occurred_at", + interval: "month", + format: "yyyy-MM", + order: { _key: "desc" }, + min_doc_count: 1, }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, }, }, }, relation_types: { - terms: { - field: "relation_type_id", size: 10, min_doc_count: 1 - }, + terms: { field: "relation_type_id", size: 10, min_doc_count: 1 }, aggs: { year_month: { date_histogram: { - field: "occurred_at", interval: "month", format: "yyyy-MM", order: { _key: "desc" }, min_doc_count: 1 - }, - aggs: { - bucket_truncate: { - bucket_sort: { size: 10 }, - }, + field: "occurred_at", + interval: "month", + format: "yyyy-MM", + order: { _key: "desc" }, + min_doc_count: 1, }, + aggs: { bucket_truncate: 
{ bucket_sort: { size: 10 } } }, }, }, }, @@ -283,18 +307,14 @@ def self.find_by_id(ids, options = {}) options[:page] ||= {} options[:page][:number] ||= 1 - options[:page][:size] ||= 1000 + options[:page][:size] ||= 1_000 options[:sort] ||= { created_at: { order: "asc" } } __elasticsearch__.search( from: (options.dig(:page, :number) - 1) * options.dig(:page, :size), size: options.dig(:page, :size), sort: [options[:sort]], - query: { - terms: { - uuid: ids, - }, - }, + query: { terms: { uuid: ids } }, aggregations: query_aggregations, ) end @@ -306,7 +326,11 @@ def self.import_by_ids(options = {}) # get every id between from_id and until_id (from_id..until_id).step(500).each do |id| EventImportByIdJob.perform_later(options.merge(id: id)) - Rails.logger.info "Queued importing for events with IDs starting with #{id}." unless Rails.env.test? + unless Rails.env.test? + Rails.logger.info "Queued importing for events with IDs starting with #{ + id + }." + end end end @@ -314,38 +338,58 @@ def self.import_by_id(options = {}) return nil if options[:id].blank? id = options[:id].to_i - index = if Rails.env.test? - "events-test" - elsif options[:index].present? - options[:index] - else - inactive_index - end + index = + if Rails.env.test? + "events-test" + elsif options[:index].present? 
+ options[:index] + else + inactive_index + end errors = 0 count = 0 - Event.where(id: id..(id + 499)).find_in_batches(batch_size: 500) do |events| - response = Event.__elasticsearch__.client.bulk \ - index: index, - type: Event.document_type, - body: events.map { |event| { index: { _id: event.id, data: event.as_indexed_json } } } + Event.where(id: id..(id + 499)).find_in_batches( + batch_size: 500, + ) do |events| + response = + Event.__elasticsearch__.client.bulk index: index, + type: Event.document_type, + body: + events.map { |event| + { + index: { + _id: event.id, + data: event.as_indexed_json, + }, + } + } # log errors - errors += response["items"].map { |k, _v| k.values.first["error"] }.compact.length - response["items"].select { |k, _v| k.values.first["error"].present? }.each do |err| - Rails.logger.error "[Elasticsearch] " + err.inspect - end + errors += + response["items"].map { |k, _v| k.values.first["error"] }.compact.length + response["items"].select do |k, _v| + k.values.first["error"].present? + end.each { |err| Rails.logger.error "[Elasticsearch] " + err.inspect } count += events.length end if errors > 1 - Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} events with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[Elasticsearch] #{errors} errors importing #{ + count + } events with IDs #{id} - #{id + 499}." elsif count > 0 - Rails.logger.info "[Elasticsearch] Imported #{count} events with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} events with IDs #{ + id + } - #{id + 499}." end - rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.info "[Elasticsearch] Error #{e.message} importing events with IDs #{id} - #{(id + 499)}." 
+ rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, + Faraday::ConnectionFailed, + ActiveRecord::LockWaitTimeout => e + Rails.logger.info "[Elasticsearch] Error #{ + e.message + } importing events with IDs #{id} - #{id + 499}." count = 0 @@ -354,23 +398,36 @@ def self.import_by_id(options = {}) count += 1 end - Rails.logger.info "[Elasticsearch] Imported #{count} events with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} events with IDs #{ + id + } - #{id + 499}." end def self.update_crossref(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = (options[:cursor] || []) - response = Event.query(nil, source_id: "crossref", page: { size: 1, cursor: [] }) - Rails.logger.info "[Update] #{response.results.total} events for source crossref." + response = + Event.query(nil, source_id: "crossref", page: { size: 1, cursor: [] }) + Rails.logger.info "[Update] #{ + response.results.total + } events for source crossref." # walk through results using cursor if response.results.total > 0 while !response.results.empty? - response = Event.query(nil, source_id: "crossref", page: { size: size, cursor: cursor }) + response = + Event.query( + nil, + source_id: "crossref", page: { size: size, cursor: cursor }, + ) break unless response.results.length.positive? - Rails.logger.info "[Update] Updating #{response.results.length} crossref events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.info "[Update] Updating #{ + response.results.length + } crossref events starting with _id #{ + response.results.to_a.first[:_id] + }." 
cursor = response.results.to_a.last[:sort] dois = response.results.map(&:subj_id).uniq @@ -402,21 +459,33 @@ def self.update_datacite_op(options = {}) end def self.update_datacite_ra(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = (options[:cursor] || []) ra = options[:ra] || "crossref" source_id = "datacite-#{ra}" - response = Event.query(nil, source_id: source_id, page: { size: 1, cursor: cursor }) - Rails.logger.info "[Update] #{response.results.total} events for source #{source_id}." + response = + Event.query(nil, source_id: source_id, page: { size: 1, cursor: cursor }) + Rails.logger.info "[Update] #{response.results.total} events for source #{ + source_id + }." # walk through results using cursor + if response.results.total > 0 while !response.results.empty? - response = Event.query(nil, source_id: source_id, page: { size: size, cursor: cursor }) + response = + Event.query( + nil, + source_id: source_id, page: { size: size, cursor: cursor }, + ) break if response.results.empty? - Rails.logger.info "[Update] Updating #{response.results.length} #{source_id} events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.info "[Update] Updating #{response.results.length} #{ + source_id + } events starting with _id #{ + response.results.to_a.first[:_id] + }." 
cursor = response.results.to_a.last[:sort] dois = response.results.map(&:obj_id).uniq @@ -430,23 +499,42 @@ def self.update_datacite_ra(options = {}) end def self.update_registrant(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = (options[:cursor] || []) # ra = options[:ra] || "crossref" source_id = options[:source_id] || "datacite-crossref,crossref" citation_type = options[:citation_type] || "Dataset-ScholarlyArticle" query = options[:query] || "registrant_id:*crossref.citations" - response = Event.query(query, source_id: source_id, citation_type: citation_type, page: { size: 1, cursor: cursor }) - Rails.logger.info "[Update] #{response.results.total} events for sources #{source_id}." + response = + Event.query( + query, + source_id: source_id, + citation_type: citation_type, + page: { size: 1, cursor: cursor }, + ) + Rails.logger.info "[Update] #{response.results.total} events for sources #{ + source_id + }." # walk through results using cursor + if response.results.total > 0 while !response.results.empty? - response = Event.query(query, source_id: source_id, citation_type: citation_type, page: { size: size, cursor: cursor }) + response = + Event.query( + query, + source_id: source_id, + citation_type: citation_type, + page: { size: size, cursor: cursor }, + ) break if response.results.empty? - Rails.logger.info "[Update] Updating #{response.results.length} #{source_id} events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.info "[Update] Updating #{response.results.length} #{ + source_id + } events starting with _id #{ + response.results.to_a.first[:_id] + }." 
cursor = response.results.to_a.last[:sort] ids = response.results.map(&:uuid).uniq @@ -459,19 +547,36 @@ def self.update_registrant(options = {}) end def self.update_datacite_orcid_auto_update(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = (options[:cursor] || []).to_i - response = Event.query(nil, source_id: "datacite-orcid-auto-update", page: { size: 1, cursor: cursor }) - Rails.logger.info "[Update] #{response.results.total} events for source datacite-orcid-auto-update." + response = + Event.query( + nil, + source_id: "datacite-orcid-auto-update", + page: { size: 1, cursor: cursor }, + ) + Rails.logger.info "[Update] #{ + response.results.total + } events for source datacite-orcid-auto-update." # walk through results using cursor + if response.results.total > 0 while !response.results.empty? - response = Event.query(nil, source_id: "datacite-orcid-auto-update", page: { size: size, cursor: cursor }) + response = + Event.query( + nil, + source_id: "datacite-orcid-auto-update", + page: { size: size, cursor: cursor }, + ) break if response.results.empty? - Rails.logger.info "[Update] Updating #{response.results.length} datacite-orcid-auto-update events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.info "[Update] Updating #{ + response.results.length + } datacite-orcid-auto-update events starting with _id #{ + response.results.to_a.first[:_id] + }." 
cursor = response.results.to_a.last[:sort] ids = response.results.map(&:obj_id).uniq @@ -517,9 +622,29 @@ def self.import_doi(id, options = {}) "event" => "publish", } - attrs = %w(creators contributors titles publisher publication_year types descriptions container sizes formats version_info language dates identifiers related_identifiers funding_references geo_locations rights_list subjects content_url).each do |a| - params[a] = meta.send(a.to_s) - end + attrs = + %w[ + creators + contributors + titles + publisher + publication_year + types + descriptions + container + sizes + formats + version_info + language + dates + identifiers + related_identifiers + funding_references + geo_locations + rights_list + subjects + content_url + ].each { |a| params[a] = meta.send(a.to_s) } # if we refresh the metadata if doi.present? @@ -529,9 +654,11 @@ def self.import_doi(id, options = {}) end if doi.save - Rails.logger.info "Record for #{ra} DOI #{doi.doi}" + (options[:refresh] ? " updated." : " created.") + Rails.logger.info "Record for #{ra} DOI #{doi.doi}" + + (options[:refresh] ? " updated." 
: " created.") else - Rails.logger.error "[Error saving #{ra} DOI #{doi.doi}]: " + doi.errors.messages.inspect + Rails.logger.error "[Error saving #{ra} DOI #{doi.doi}]: " + + doi.errors.messages.inspect end doi @@ -540,13 +667,15 @@ def self.import_doi(id, options = {}) Rails.logger.warn e.message end - def to_param # overridden, use uuid instead of id + def to_param uuid end # import DOIs unless they are from DataCite or are a Crossref Funder ID def dois_to_import - [doi_from_url(subj_id), doi_from_url(obj_id)].compact.reduce([]) do |sum, d| + [doi_from_url(subj_id), doi_from_url(obj_id)].compact.reduce( + [], + ) do |sum, d| prefix = d.split("/", 2).first # ignore Crossref Funder ID @@ -562,16 +691,18 @@ def dois_to_import end def send_callback - data = { "data" => { - "id" => uuid, - "type" => "events", - "state" => aasm_state, - "errors" => error_messages, - "messageAction" => message_action, - "sourceToken" => source_token, - "total" => total, - "timestamp" => timestamp, - } } + data = { + "data" => { + "id" => uuid, + "type" => "events", + "state" => aasm_state, + "errors" => error_messages, + "messageAction" => message_action, + "sourceToken" => source_token, + "total" => total, + "timestamp" => timestamp, + }, + } Maremma.post(callback, data: data.to_json, token: ENV["API_KEY"]) end @@ -582,34 +713,54 @@ def access_method end def self.subj_id_check(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = [options[:from_id], options[:until_id]] - response = Event.query(nil, source_id: "datacite-crossref", page: { size: 1, cursor: [] }) - Rails.logger.warn "[DoubleCheck] #{response.results.total} events for source datacite-crossref." + response = + Event.query( + nil, + source_id: "datacite-crossref", page: { size: 1, cursor: [] }, + ) + Rails.logger.warn "[DoubleCheck] #{ + response.results.total + } events for source datacite-crossref." # walk through results using cursor if response.results.total.positive? 
while response.results.length.positive? - response = Event.query(nil, source_id: "datacite-crossref", page: { size: size, cursor: cursor }) + response = + Event.query( + nil, + source_id: "datacite-crossref", + page: { size: size, cursor: cursor }, + ) break unless response.results.length.positive? - Rails.logger.warn "[DoubleCheck] DoubleCheck #{response.results.length} events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.warn "[DoubleCheck] DoubleCheck #{ + response.results.length + } events starting with _id #{ + response.results.to_a.first[:_id] + }." cursor = response.results.to_a.last[:sort] Rails.logger.warn "[DoubleCheck] Cursor: #{cursor} " - events = response.results.map { |item| { uuid: item.uuid, subj_id: item.subj_id } } + events = + response.results.map do |item| + { uuid: item.uuid, subj_id: item.subj_id } + end SubjCheckJob.perform_later(events, options) end end end def self.modify_nested_objects(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = [options[:from_id], options[:until_id]] - response = Event.query(nil, page: { size: 1, cursor: [] }) - Rails.logger.info "[modify_nested_objects] #{response.results.total} events for source datacite-crossref." + response = Event.query(nil, page: { size: 1, cursor: [] }) + Rails.logger.info "[modify_nested_objects] #{ + response.results.total + } events for source datacite-crossref." # walk through results using cursor if response.results.total.positive? @@ -617,7 +768,11 @@ def self.modify_nested_objects(options = {}) response = Event.query(nil, page: { size: size, cursor: cursor }) break unless response.results.length.positive? - Rails.logger.info "[modify_nested_objects] modify_nested_objects #{response.results.length} events starting with _id #{response.results.to_a.first[:_id]}." 
+ Rails.logger.info "[modify_nested_objects] modify_nested_objects #{ + response.results.length + } events starting with _id #{ + response.results.to_a.first[:_id] + }." cursor = response.results.to_a.last[:sort] Rails.logger.info "[modify_nested_objects] Cursor: #{cursor} " @@ -632,8 +787,12 @@ def self.modify_nested_objects(options = {}) def self.camelcase_nested_objects(uuid) event = Event.find_by(uuid: uuid) if event.present? - subj = event.subj.transform_keys { |key| key.to_s.underscore.camelcase(:lower) } - obj = event.obj.transform_keys { |key| key.to_s.underscore.camelcase(:lower) } + subj = + event.subj.transform_keys do |key| + key.to_s.underscore.camelcase(:lower) + end + obj = + event.obj.transform_keys { |key| key.to_s.underscore.camelcase(:lower) } event.update(subj: subj, obj: obj) end end @@ -641,7 +800,10 @@ def self.camelcase_nested_objects(uuid) def self.label_state_event(event) subj_prefix = event[:subj_id][/(10\.\d{4,5})/, 1] unless Prefix.where(uid: subj_prefix).exists? - Event.find_by(uuid: event[:uuid]).update_attribute(:state_event, "crossref_citations_error") + Event.find_by(uuid: event[:uuid]).update_attribute( + :state_event, + "crossref_citations_error", + ) end end @@ -652,7 +814,7 @@ def self.label_state_event(event) # +query+:: ES query to filter the index # +job_name+:: Acive Job class name of the Job that would be executed on every matched results def self.loop_through_events(options) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i cursor = options[:cursor] || [] filter = options[:filter] || {} label = options[:label] || "" @@ -665,17 +827,20 @@ def self.loop_through_events(options) # walk through results using cursor if response.results.total.positive? while response.results.length.positive? 
- response = Event.query(query, filter.merge(page: { size: size, cursor: cursor })) + response = + Event.query(query, filter.merge(page: { size: size, cursor: cursor })) break unless response.results.length.positive? - Rails.logger.info "#{label} #{response.results.length} events starting with _id #{response.results.to_a.first[:_id]}." + Rails.logger.info "#{label} #{ + response.results.length + } events starting with _id #{ + response.results.to_a.first[:_id] + }." cursor = response.results.to_a.last[:sort] Rails.logger.info "#{label} Cursor: #{cursor} " ids = response.results.map(&:uuid).uniq - ids.each do |id| - Object.const_get(job_name).perform_later(id, filter) - end + ids.each { |id| Object.const_get(job_name).perform_later(id, filter) } end end end @@ -688,8 +853,8 @@ def metric_type end def doi - Array.wrap(subj["proxyIdentifiers"]).grep(/\A10\.\d{4,5}\/.+\z/) { $1 } + - Array.wrap(obj["proxyIdentifiers"]).grep(/\A10\.\d{4,5}\/.+\z/) { $1 } + + Array.wrap(subj["proxyIdentifiers"]).grep(%r{\A10\.\d{4,5}/.+\z}) { $1 } + + Array.wrap(obj["proxyIdentifiers"]).grep(%r{\A10\.\d{4,5}/.+\z}) { $1 } + Array.wrap(subj["funder"]).map { |f| doi_from_url(f["@id"]) }.compact + Array.wrap(obj["funder"]).map { |f| doi_from_url(f["@id"]) }.compact + [doi_from_url(subj_id), doi_from_url(obj_id)].compact @@ -717,7 +882,12 @@ def uuid_format end def registrant_id - [subj["registrantId"], obj["registrantId"], subj["providerId"], obj["providerId"]].compact + [ + subj["registrantId"], + obj["registrantId"], + subj["providerId"], + obj["providerId"], + ].compact end def subtype @@ -725,27 +895,33 @@ def subtype end def citation_type - return nil if subj["@type"].blank? || subj["@type"] == "CreativeWork" || obj["@type"].blank? || obj["@type"] == "CreativeWork" + if subj["@type"].blank? || subj["@type"] == "CreativeWork" || + obj["@type"].blank? 
|| + obj["@type"] == "CreativeWork" + return nil + end [subj["@type"], obj["@type"]].compact.sort.join("-") end def doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").downcase + uri.path.gsub(%r{^/}, "").downcase end end def uppercase_doi_from_url(url) - if /\A(?:(http|https):\/\/(dx\.)?(doi.org|handle.test.datacite.org)\/)?(doi:)?(10\.\d{4,5}\/.+)\z/.match?(url) + if %r{\A(?:(http|https)://(dx\.)?(doi.org|handle.test.datacite.org)/)?(doi:)?(10\.\d{4,5}/.+)\z}. + match?(url) uri = Addressable::URI.parse(url) - uri.path.gsub(/^\//, "").upcase + uri.path.gsub(%r{^/}, "").upcase end end def orcid_from_url(url) - Array(/\A(http|https):\/\/orcid\.org\/(.+)/.match(url)).last + Array(%r{\A(http|https)://orcid\.org/(.+)}.match(url)).last end def timestamp @@ -772,9 +948,17 @@ def obj_cache_key end def citation_year - "" unless (INCLUDED_RELATION_TYPES + RELATIONS_RELATION_TYPES).include?(relation_type_id) - subj_publication = subj["datePublished"] || subj["date_published"] || (date_published(subj_id) || year_month) - obj_publication = obj["datePublished"] || obj["date_published"] || (date_published(obj_id) || year_month) + unless (INCLUDED_RELATION_TYPES + RELATIONS_RELATION_TYPES).include?( + relation_type_id, + ) + "" + end + subj_publication = + subj["datePublished"] || subj["date_published"] || + (date_published(subj_id) || year_month) + obj_publication = + obj["datePublished"] || obj["date_published"] || + (date_published(obj_id) || year_month) [subj_publication[0..3].to_i, obj_publication[0..3].to_i].max end @@ -843,6 +1027,8 @@ def set_defaults self.total = 1 if total.blank? self.relation_type_id = "references" if relation_type_id.blank? self.occurred_at = Time.zone.now.utc if occurred_at.blank? 
- self.license = "https://creativecommons.org/publicdomain/zero/1.0/" if license.blank? - end + if license.blank? + self.license = "https://creativecommons.org/publicdomain/zero/1.0/" + end + end # overridden, use uuid instead of id end diff --git a/app/models/funder.rb b/app/models/funder.rb index 70fef8086..456591af5 100644 --- a/app/models/funder.rb +++ b/app/models/funder.rb @@ -1,15 +1,21 @@ +# frozen_string_literal: true + class Funder # include helper module for PORO models include Modelable def self.find_by_id(id) doi = doi_from_url(id) - return { errors: [{ "status" => 422, "title" => "Not a valid DOI." }] } if doi.blank? + if doi.blank? + return { errors: [{ "status" => 422, "title" => "Not a valid DOI." }] } + end url = "https://api.crossref.org/funders/#{doi}" response = Maremma.get(url, host: true) - return { errors: [{ "status" => 404, "title" => "Not found." }] } if response.status == 404 + if response.status == 404 + return { errors: [{ "status" => 404, "title" => "Not found." }] } + end return {} if response.status != 200 message = response.body.dig("data", "message") @@ -25,7 +31,10 @@ def self.query(query, options = {}) offset = options[:offset] || 0 if query.present? 
- url = "https://api.crossref.org/funders?query=#{query}&rows=#{rows}&offset=#{offset}" + url = + "https://api.crossref.org/funders?query=#{query}&rows=#{rows}&offset=#{ + offset + }" else url = "https://api.crossref.org/funders?rows=#{rows}&offset=#{offset}" end @@ -34,27 +43,23 @@ def self.query(query, options = {}) return {} if response.status != 200 - data = response.body.dig("data", "message", "items").map do |message| - parse_message(id: "https://doi.org/10.13039/#{message['id']}", message: message) - end + data = + response.body.dig("data", "message", "items").map do |message| + parse_message( + id: "https://doi.org/10.13039/#{message['id']}", message: message, + ) + end meta = { "total" => response.body.dig("data", "message", "total-results") } errors = response.body.fetch("errors", nil) - { - data: data, - meta: meta, - errors: errors, - } + { data: data, meta: meta, errors: errors } end def self.parse_message(id: nil, message: nil) if message["location"].present? c = ISO3166::Country.find_country_by_name(message["location"]) code = c.present? ? c.alpha2 : nil - country = { - "code" => code, - "name" => message["location"], - } + country = { "code" => code, "name" => message["location"] } else country = nil end diff --git a/app/models/handle.rb b/app/models/handle.rb index d3dc7b0c9..4a5301e4f 100644 --- a/app/models/handle.rb +++ b/app/models/handle.rb @@ -1,7 +1,16 @@ +# frozen_string_literal: true + class Handle include Searchable - attr_reader :id, :prefix, :registration_agency, :clients, :providers, :created, :cache_key, :updated + attr_reader :id, + :prefix, + :registration_agency, + :clients, + :providers, + :created, + :cache_key, + :updated RA_HANDLES = { "10.SERV/CROSSREF" => "Crossref", @@ -38,11 +47,7 @@ def provider_ids end def self.get_query_url(options = {}) - if options[:id].present? - "#{url}/#{options[:id]}" - else - url - end + options[:id].present? ? 
"#{url}/#{options[:id]}" : url end def self.parse_data(result, options = {}) @@ -52,7 +57,10 @@ def self.parse_data(result, options = {}) response_code = result.body.dig("data", "responseCode") return nil unless response_code == 1 - record = result.body.fetch("data", {}).fetch("values", []).detect { |hs| hs["type"] == "HS_SERV" } + record = + result.body.fetch("data", {}).fetch("values", []).detect do |hs| + hs["type"] == "HS_SERV" + end fail ActiveRecord::RecordNotFound if record.blank? diff --git a/app/models/heartbeat.rb b/app/models/heartbeat.rb index e4e0d87ad..2fcb56315 100644 --- a/app/models/heartbeat.rb +++ b/app/models/heartbeat.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "timeout" class Heartbeat diff --git a/app/models/media.rb b/app/models/media.rb index 3002fbbe1..0b0d8dee6 100644 --- a/app/models/media.rb +++ b/app/models/media.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Media < ApplicationRecord include Bolognese::Utils include Bolognese::DoiUtils @@ -9,8 +11,10 @@ class Media < ApplicationRecord alias_attribute :datacite_doi_id, :doi_id validates_presence_of :url - validates_format_of :url, with: /\A(ftp|http|https|gs|s3|dos):\/\/[\S]+/, if: :url? - validates_format_of :media_type, with: /[\S]+\/[\S]+/, if: :media_type? + validates_format_of :url, + with: %r{\A(ftp|http|https|gs|s3|dos)://\S+}, + if: :url? + validates_format_of :media_type, with: %r{\S+/\S+}, if: :media_type? validates_associated :doi belongs_to :doi, foreign_key: :dataset, inverse_of: :media @@ -46,7 +50,8 @@ def doi_id=(value) end def set_defaults - current_media = Media.where(dataset: dataset).order("media.created DESC").first + current_media = + Media.where(dataset: dataset).order("media.created DESC").first self.version = current_media.present? ? current_media.version + 1 : 0 self.media_type = "text/plain" if media_type.blank? 
self.updated = Time.zone.now.utc.iso8601 diff --git a/app/models/metadata.rb b/app/models/metadata.rb index 641a83ea2..e93f64736 100644 --- a/app/models/metadata.rb +++ b/app/models/metadata.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Metadata < ApplicationRecord include Bolognese::Utils include Bolognese::DoiUtils @@ -42,29 +44,36 @@ def metadata_must_be_valid return nil if doi&.draft? return nil if xml.blank? - doc = Nokogiri::XML(xml, nil, "UTF-8", &:noblanks) + doc = Nokogiri.XML(xml, nil, "UTF-8", &:noblanks) return nil if doc.blank? errors.add(:xml, "XML has no namespace.") && return if namespace.blank? # load XSD from bolognese gem kernel = namespace.to_s.split("/").last - filepath = Bundler.rubygems.find_name("bolognese").first.full_gem_path + "/resources/#{kernel}/metadata.xsd" - schema = Nokogiri::XML::Schema(open(filepath)) + filepath = + Bundler.rubygems.find_name("bolognese").first.full_gem_path + + "/resources/#{kernel}/metadata.xsd" + schema = Nokogiri::XML.Schema(open(filepath)) err = schema.validate(doc).map(&:to_s).unwrap errors.add(:xml, err) if err.present? end def set_metadata_version - current_metadata = Metadata.where(dataset: dataset).order("metadata.created DESC").first - self.metadata_version = current_metadata.present? ? current_metadata.metadata_version + 1 : 0 + current_metadata = + Metadata.where(dataset: dataset).order("metadata.created DESC").first + self.metadata_version = + current_metadata.present? ? current_metadata.metadata_version + 1 : 0 end def set_namespace return nil if xml.blank? 
- doc = Nokogiri::XML(xml, nil, "UTF-8", &:noblanks) - ns = doc.collect_namespaces.detect { |_k, v| v.start_with?("http://datacite.org/schema/kernel") } + doc = Nokogiri.XML(xml, nil, "UTF-8", &:noblanks) + ns = + doc.collect_namespaces.detect do |_k, v| + v.start_with?("http://datacite.org/schema/kernel") + end self.namespace = Array.wrap(ns).last end end diff --git a/app/models/organization.rb b/app/models/organization.rb index a7fa2df5d..4d8522662 100644 --- a/app/models/organization.rb +++ b/app/models/organization.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Organization # include helper module for working with Wikidata include Wikidatable @@ -24,7 +26,9 @@ def self.find_by_id(id) wikidata = data.dig(0, "wikidata", 0) wikidata_data = find_by(wikidata_id: wikidata) - data = [data.first.reverse_merge(wikidata_data[:data].first)] if wikidata_data + if wikidata_data + data = [data.first.reverse_merge(wikidata_data[:data].first)] + end errors = response.body.fetch("errors", nil) @@ -45,7 +49,9 @@ def self.find_by_grid_id(id) wikidata = data.dig(0, "wikidata", 0) wikidata_data = find_by(wikidata_id: wikidata) - data = [data.first.reverse_merge(wikidata_data[:data].first)] if wikidata_data + if wikidata_data + data = [data.first.reverse_merge(wikidata_data[:data].first)] + end errors = response.body.fetch("errors", nil) @@ -56,7 +62,10 @@ def self.find_by_crossref_funder_id(id) crossref_funder_id = crossref_funder_id_from_url(id) return {} if crossref_funder_id.blank? 
- url = "https://api.ror.org/organizations?query=\"#{crossref_funder_id.split('/', 2).last}\"" + url = + "https://api.ror.org/organizations?query=\"#{ + crossref_funder_id.split('/', 2).last + }\"" response = Maremma.get(url, host: true) message = response.body.dig("data", "items", 0) || {} @@ -66,7 +75,9 @@ def self.find_by_crossref_funder_id(id) wikidata = data.dig(0, "wikidata", 0) wikidata_data = find_by(wikidata_id: wikidata) - data = [data.first.reverse_merge(wikidata_data[:data].first)] if wikidata_data + if wikidata_data + data = [data.first.reverse_merge(wikidata_data[:data].first)] + end errors = response.body.fetch("errors", nil) @@ -82,7 +93,10 @@ def self.query(query, options = {}) url = "https://api.ror.org/organizations?page=#{page}" url += "&query=#{query}" if query.present? if types.present? && country.present? - url += "&filter=types:#{types.upcase_first},country.country_code:#{country.upcase}" + url += + "&filter=types:#{types.upcase_first},country.country_code:#{ + country.upcase + }" elsif types.present? url += "&filter=types:#{types.upcase_first}" elsif country.present? @@ -93,17 +107,21 @@ def self.query(query, options = {}) return {} if response.status != 200 - data = Array.wrap(response.body.dig("data", "items")).map do |message| - parse_message(message) - end + data = + Array.wrap(response.body.dig("data", "items")).map do |message| + parse_message(message) + end - countries = Array.wrap(response.body.dig("data", "meta", "countries")).map do |hsh| - country = ISO3166::Country[hsh["id"]] + countries = + Array.wrap(response.body.dig("data", "meta", "countries")).map do |hsh| + country = ISO3166::Country[hsh["id"]] - { "id" => hsh["id"], - "title" => country.present? ? country.name : hsh["id"], - "count" => hsh["count"] } - end + { + "id" => hsh["id"], + "title" => country.present? ? 
country.name : hsh["id"], + "count" => hsh["count"], + } + end meta = { "total" => response.body.dig("data", "number_of_results"), @@ -113,11 +131,7 @@ def self.query(query, options = {}) errors = response.body.fetch("errors", nil) - { - data: data, - meta: meta, - errors: errors, - } + { data: data, meta: meta, errors: errors } end def self.parse_message(message) @@ -126,23 +140,23 @@ def self.parse_message(message) name: message.dig("country", "country_name"), }.compact - labels = Array.wrap(message["labels"]).map do |label| - code = label["iso639"].present? ? label["iso639"].upcase : nil - { - code: code, - name: label["label"], - }.compact - end + labels = + Array.wrap(message["labels"]).map do |label| + code = label["iso639"].present? ? label["iso639"].upcase : nil + { code: code, name: label["label"] }.compact + end # remove whitespace from isni identifier - isni = Array.wrap(message.dig("external_ids", "ISNI", "all")).map do |i| - i.gsub(/ /, "") - end + isni = + Array.wrap(message.dig("external_ids", "ISNI", "all")).map do |i| + i.gsub(/ /, "") + end # add DOI prefix to Crossref Funder ID - fundref = Array.wrap(message.dig("external_ids", "FundRef", "all")).map do |f| - "10.13039/#{f}" - end + fundref = + Array.wrap(message.dig("external_ids", "FundRef", "all")).map do |f| + "10.13039/#{f}" + end Hashie::Mash.new( id: message["id"], @@ -163,15 +177,21 @@ def self.parse_message(message) end def self.ror_id_from_url(url) - i = Array(/\A(https?:\/\/)?(ror\.org\/)?(0\w{6}\d{2})\z/.match(url)).last + i = Array(%r{\A(https?://)?(ror\.org/)?(0\w{6}\d{2})\z}.match(url)).last i = "ror.org/#{i}" if i.present? end def self.crossref_funder_id_from_url(url) - Array(/\A(https?:\/\/)?(dx\.)?(doi.org\/)?(doi:)?(10\.13039\/.+)\z/.match(url)).last + Array( + %r{\A(https?://)?(dx\.)?(doi.org/)?(doi:)?(10\.13039/.+)\z}.match( + url, + ), + ). 
+ last end def self.grid_id_from_url(url) - Array(/\A(https?:\/\/)?(grid\.ac\/)?(institutes\/)?(grid\..+)/.match(url)).last + Array(%r{\A(https?://)?(grid\.ac/)?(institutes/)?(grid\..+)}.match(url)). + last end end diff --git a/app/models/other_doi.rb b/app/models/other_doi.rb index d93e0360c..fdf63c691 100644 --- a/app/models/other_doi.rb +++ b/app/models/other_doi.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class OtherDoi < Doi include Elasticsearch::Model @@ -23,13 +25,20 @@ def set_defaults def self.import_by_ids(options = {}) # TODO remove query for type once STI is enabled - from_id = (options[:from_id] || OtherDoi.where(type: "OtherDoi").minimum(:id)).to_i - until_id = (options[:until_id] || OtherDoi.where(type: "OtherDoi").maximum(:id)).to_i + from_id = + (options[:from_id] || OtherDoi.where(type: "OtherDoi").minimum(:id)).to_i + until_id = + (options[:until_id] || OtherDoi.where(type: "OtherDoi").maximum(:id)).to_i # get every id between from_id and end_id (from_id..until_id).step(500).each do |id| OtherDoiImportByIdJob.perform_later(options.merge(id: id)) - Rails.logger.info "Queued importing for other DOIs with IDs starting with #{id}." unless Rails.env.test? + unless Rails.env.test? + Rails. + logger.info "Queued importing for other DOIs with IDs starting with #{ + id + }." + end end (from_id..until_id).to_a.length @@ -39,25 +48,38 @@ def self.import_by_id(options = {}) return nil if options[:id].blank? id = options[:id].to_i - index = if Rails.env.test? - index_name - elsif options[:index].present? - options[:index] - else - inactive_index - end + index = + if Rails.env.test? + index_name + elsif options[:index].present? 
+ options[:index] + else + inactive_index + end errors = 0 count = 0 # TODO remove query for type once STI is enabled - OtherDoi.where(type: "OtherDoi").where(id: id..(id + 499)).find_in_batches(batch_size: 500) do |dois| - response = OtherDoi.__elasticsearch__.client.bulk \ - index: index, - type: OtherDoi.document_type, - body: dois.map { |doi| { index: { _id: doi.id, data: doi.as_indexed_json } } } + OtherDoi.where(type: "OtherDoi").where(id: id..(id + 499)).find_in_batches( + batch_size: 500, + ) do |dois| + response = + OtherDoi.__elasticsearch__.client.bulk index: index, + type: OtherDoi.document_type, + body: + dois.map { |doi| + { + index: { + _id: doi.id, + data: + doi.as_indexed_json, + }, + } + } # try to handle errors - errors_in_response = response["items"].select { |k, _v| k.values.first["error"].present? } + errors_in_response = + response["items"].select { |k, _v| k.values.first["error"].present? } errors += errors_in_response.length errors_in_response.each do |item| Rails.logger.error "[Elasticsearch] " + item.inspect @@ -69,24 +91,35 @@ def self.import_by_id(options = {}) end if errors > 1 - Rails.logger.error "[Elasticsearch] #{errors} errors importing #{count} other DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.error "[Elasticsearch] #{errors} errors importing #{ + count + } other DOIs with IDs #{id} - #{id + 499}." elsif count > 0 - Rails.logger.info "[Elasticsearch] Imported #{count} other DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{ + count + } other DOIs with IDs #{id} - #{id + 499}." end count - rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, Faraday::ConnectionFailed, ActiveRecord::LockWaitTimeout => e - Rails.logger.info "[Elasticsearch] Error #{e.message} importing other DOIs with IDs #{id} - #{(id + 499)}." 
+ rescue Elasticsearch::Transport::Transport::Errors::RequestEntityTooLarge, + Faraday::ConnectionFailed, + ActiveRecord::LockWaitTimeout => e + Rails.logger.info "[Elasticsearch] Error #{ + e.message + } importing other DOIs with IDs #{id} - #{id + 499}." count = 0 # TODO remove query for type once STI is enabled - OtherDoi.where(type: "OtherDoi").where(id: id..(id + 499)).find_each do |doi| + OtherDoi.where(type: "OtherDoi").where(id: id..(id + 499)). + find_each do |doi| IndexJob.perform_later(doi) count += 1 end - Rails.logger.info "[Elasticsearch] Imported #{count} other DOIs with IDs #{id} - #{(id + 499)}." + Rails.logger.info "[Elasticsearch] Imported #{count} other DOIs with IDs #{ + id + } - #{id + 499}." count end @@ -98,7 +131,7 @@ def self.import_by_id(options = {}) # +query+:: ES query to filter the index # +job_name+:: Acive Job class name of the Job that would be executed on every matched results def self.loop_through_dois(options = {}) - size = (options[:size] || 1000).to_i + size = (options[:size] || 1_000).to_i filter = options[:filter] || {} label = options[:label] || "" options[:job_name] ||= "" @@ -111,16 +144,25 @@ def self.loop_through_dois(options = {}) cursor = [] end - response = OtherDoi.query(query, filter.merge(page: { size: 1, cursor: [] })) + response = + OtherDoi.query(query, filter.merge(page: { size: 1, cursor: [] })) message = "#{label} #{response.results.total} other dois with #{label}." # walk through results using cursor if response.results.total.positive? while response.results.results.length.positive? - response = OtherDoi.query(query, filter.merge(page: { size: size, cursor: cursor })) + response = + OtherDoi.query( + query, + filter.merge(page: { size: size, cursor: cursor }), + ) break unless response.results.results.length.positive? - Rails.logger.info "#{label} #{response.results.results.length} other dois starting with _id #{response.results.to_a.first[:_id]}." 
+ Rails.logger.info "#{label} #{ + response.results.results.length + } other dois starting with _id #{ + response.results.to_a.first[:_id] + }." cursor = response.results.to_a.last[:sort] Rails.logger.info "#{label} Cursor: #{cursor} " diff --git a/app/models/person.rb b/app/models/person.rb index b373c9f75..e2815cf1d 100644 --- a/app/models/person.rb +++ b/app/models/person.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Person # include helper module for PORO models include Modelable @@ -17,20 +19,26 @@ def self.find_by_id(id) employments = get_orcid(orcid: orcid, endpoint: "employments") - other_name = Array.wrap(person.dig("data", "other-names", "other-name")).map do |n| - n["content"] - end - - researcher_urls = Array.wrap(person.dig("data", "researcher-urls", "researcher-url")).map do |r| - { "name" => r["url-name"], - "url" => r.dig("url", "value") } - end - - identifiers = Array.wrap(person.dig("data", "external-identifiers", "external-identifier")).map do |i| - { "identifierType" => i["external-id-type"], - "identifierUrl" => i.dig("external-id-url", "value"), - "identifier" => i["external-id-value"] } - end + other_name = + Array.wrap(person.dig("data", "other-names", "other-name")).map do |n| + n["content"] + end + + researcher_urls = + Array.wrap(person.dig("data", "researcher-urls", "researcher-url")). + map { |r| { "name" => r["url-name"], "url" => r.dig("url", "value") } } + + identifiers = + Array.wrap( + person.dig("data", "external-identifiers", "external-identifier"), + ). 
+ map do |i| + { + "identifierType" => i["external-id-type"], + "identifierUrl" => i.dig("external-id-url", "value"), + "identifier" => i["external-id-value"], + } + end employment = get_employments(employments) # wikidata_employment = wikidata_query(employment) @@ -44,7 +52,8 @@ def self.find_by_id(id) "description" => person.dig("data", "biography", "content"), "researcher-urls" => researcher_urls, "identifiers" => identifiers, - "country-code" => person.dig("data", "addresses", "address", 0, "country", "value"), + "country-code" => + person.dig("data", "addresses", "address", 0, "country", "value"), "employment" => employment, } @@ -65,27 +74,28 @@ def self.query(query, options = {}) "start" => options[:offset].to_i * options[:limit].to_i, }.compact - url = "https://pub.orcid.org/v3.0/expanded-search/?" + URI.encode_www_form(params) + url = + "https://pub.orcid.org/v3.0/expanded-search/?" + + URI.encode_www_form(params) response = Maremma.get(url, accept: "json") if response.status >= 400 - message = response.body.dig("errors", 0, "title", "developer-message") || "Something went wrong in ORCID" + message = + response.body.dig("errors", 0, "title", "developer-message") || + "Something went wrong in ORCID" fail ::Faraday::ClientError, message end return [] if response.status != 200 - data = Array.wrap(response.body.dig("data", "expanded-result")).map do |message| - parse_message(message: message) - end + data = + Array.wrap(response.body.dig("data", "expanded-result")).map do |message| + parse_message(message: message) + end meta = { "total" => response.body.dig("data", "num-found").to_i } errors = response.body.fetch("errors", nil) - { - data: data, - meta: meta, - errors: errors, - } + { data: data, meta: meta, errors: errors } end def self.get_orcid(orcid: nil, endpoint: nil) @@ -93,7 +103,9 @@ def self.get_orcid(orcid: nil, endpoint: nil) response = Maremma.get(url, accept: "json") if response.status >= 405 - message = response.body.dig("errors", 0, "title", 
"developer-message") || "Something went wrong in ORCID" + message = + response.body.dig("errors", 0, "title", "developer-message") || + "Something went wrong in ORCID" fail ::Faraday::ClientError, message end @@ -104,15 +116,41 @@ def self.get_orcid(orcid: nil, endpoint: nil) def self.get_employments(employments) Array.wrap(employments.dig("data", "affiliation-group")).map do |a| - i = a.dig("summaries", 0, "employment-summary", "organization", "disambiguated-organization") || {} + i = + a.dig( + "summaries", + 0, + "employment-summary", + "organization", + "disambiguated-organization", + ) || + {} s = a.dig("summaries", 0, "employment-summary", "start-date") || {} e = a.dig("summaries", 0, "employment-summary", "end-date") || {} - { "organization_id" => i.dig("disambiguation-source") == "GRID" ? "https://grid.ac/institutes/" + i.dig("disambiguated-organization-identifier") : nil, - "organization_name" => a.dig("summaries", 0, "employment-summary", "organization", "name"), - "role_title" => a.dig("summaries", 0, "employment-summary", "role-title"), - "start_date" => get_date_from_parts(s.dig("year", "value"), s.dig("month", "value"), s.dig("day", "value")), - "end_date" => get_date_from_parts(e.dig("year", "value"), e.dig("month", "value"), e.dig("day", "value")) }.compact + { + "organization_id" => + if i.dig("disambiguation-source") == "GRID" + "https://grid.ac/institutes/" + + i.dig("disambiguated-organization-identifier") + end, + "organization_name" => + a.dig("summaries", 0, "employment-summary", "organization", "name"), + "role_title" => + a.dig("summaries", 0, "employment-summary", "role-title"), + "start_date" => + get_date_from_parts( + s.dig("year", "value"), + s.dig("month", "value"), + s.dig("day", "value"), + ), + "end_date" => + get_date_from_parts( + e.dig("year", "value"), + e.dig("month", "value"), + e.dig("day", "value"), + ), + }.compact end end @@ -127,13 +165,14 @@ def self.parse_message(message: nil) identifiers = message.fetch("identifiers", 
[]) employment = message.fetch("employment", []) - name = if message.fetch("credit-name", nil).present? - message.fetch("credit-name") - elsif given_name.present? || family_name.present? - [given_name, family_name].join(" ") - else - orcid - end + name = + if message.fetch("credit-name", nil).present? + message.fetch("credit-name") + elsif given_name.present? || family_name.present? + [given_name, family_name].join(" ") + else + orcid + end if message.fetch("country-code", nil).present? c = ISO3166::Country[message.fetch("country-code")] @@ -145,19 +184,21 @@ def self.parse_message(message: nil) country = nil end - Hashie::Mash.new({ - id: orcid_as_url(orcid), - type: "Person", - orcid: orcid, - name: name, - given_name: given_name, - family_name: family_name, - alternate_name: alternate_name, - description: description, - links: links, - identifiers: identifiers, - country: country, - employment: employment, - }.compact) + Hashie::Mash.new( + { + id: orcid_as_url(orcid), + type: "Person", + orcid: orcid, + name: name, + given_name: given_name, + family_name: family_name, + alternate_name: alternate_name, + description: description, + links: links, + identifiers: identifiers, + country: country, + employment: employment, + }.compact, + ) end end diff --git a/app/models/phrase.rb b/app/models/phrase.rb index 9f1d8049d..fa61bf1f4 100644 --- a/app/models/phrase.rb +++ b/app/models/phrase.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "pwqgen" class Phrase diff --git a/app/models/prefix.rb b/app/models/prefix.rb index 83761506d..db20a647d 100644 --- a/app/models/prefix.rb +++ b/app/models/prefix.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class Prefix < ApplicationRecord # include helper module for caching infrequently changing resources include Cacheable @@ -26,21 +28,21 @@ class Prefix < ApplicationRecord end mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :uid, type: :keyword - indexes :provider_ids, type: :keyword - indexes 
:client_ids, type: :keyword + indexes :id, type: :keyword + indexes :uid, type: :keyword + indexes :provider_ids, type: :keyword + indexes :client_ids, type: :keyword indexes :provider_prefix_ids, type: :keyword - indexes :client_prefix_ids, type: :keyword - indexes :state, type: :keyword - indexes :prefix, type: :text - indexes :created_at, type: :date + indexes :client_prefix_ids, type: :keyword + indexes :state, type: :keyword + indexes :prefix, type: :text + indexes :created_at, type: :date # index associations - indexes :clients, type: :object - indexes :providers, type: :object - indexes :client_prefixes, type: :object - indexes :provider_prefixes, type: :object + indexes :clients, type: :object + indexes :providers, type: :object + indexes :client_prefixes, type: :object + indexes :provider_prefixes, type: :object end def as_indexed_json(_options = {}) @@ -54,32 +56,49 @@ def as_indexed_json(_options = {}) "state" => state, "prefix" => prefix, "created_at" => created_at, - "clients" => clients.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, - "providers" => providers.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, - "client_prefixes" => client_prefixes.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, - "provider_prefixes" => provider_prefixes.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, + "clients" => + clients.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, + "providers" => + providers.map do |m| + m.try(:as_indexed_json, exclude_associations: true) + end, + "client_prefixes" => + client_prefixes.map do |m| + m.try(:as_indexed_json, exclude_associations: true) + end, + "provider_prefixes" => + provider_prefixes.map do |m| + m.try(:as_indexed_json, exclude_associations: true) + end, } end def self.query_aggregations { states: { terms: { field: "state", size: 3, min_doc_count: 1 } }, - years: { date_histogram: { field: "created_at", interval: "year", format: "year", order: 
{ _key: "desc" }, min_doc_count: 1 }, - aggs: { bucket_truncate: { bucket_sort: { size: 10 } } } }, - providers: { terms: { field: "provider_ids_and_names", size: 10, min_doc_count: 1 } }, - clients: { terms: { field: "client_ids_and_names", size: 10, min_doc_count: 1 } }, + years: { + date_histogram: { + field: "created_at", + interval: "year", + format: "year", + order: { _key: "desc" }, + min_doc_count: 1, + }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, + }, + providers: { + terms: { field: "provider_ids_and_names", size: 10, min_doc_count: 1 }, + }, + clients: { + terms: { field: "client_ids_and_names", size: 10, min_doc_count: 1 }, + }, } end # return results for one prefix def self.find_by_id(id) __elasticsearch__.search( - query: { - term: { - uid: id, - }, - }, - aggregations: query_aggregations, + query: { term: { uid: id } }, aggregations: query_aggregations, ) end @@ -88,9 +107,7 @@ def client_ids end def client_ids_and_name - clients.pluck(:symbol, :name).map do |p| - "#{p[0].downcase}:#{p[1]}" - end + clients.pluck(:symbol, :name).map { |p| "#{p[0].downcase}:#{p[1]}" } end def provider_ids @@ -98,9 +115,7 @@ def provider_ids end def provider_ids_and_names - providers.pluck(:symbol, :name).map do |p| - "#{p[0].downcase}:#{p[1]}" - end + providers.pluck(:symbol, :name).map { |p| "#{p[0].downcase}:#{p[1]}" } end def client_prefix_ids diff --git a/app/models/provider.rb b/app/models/provider.rb index 7b2cbef3c..8e8c0479d 100644 --- a/app/models/provider.rb +++ b/app/models/provider.rb @@ -1,7 +1,20 @@ +# frozen_string_literal: true + require "countries" class Provider < ApplicationRecord - audited except: %i[globus_uuid salesforce_id password updated experiments comments logo version doi_quota_allowed doi_quota_used] + audited except: %i[ + globus_uuid + salesforce_id + password + updated + experiments + comments + logo + version + doi_quota_allowed + doi_quota_used + ] # include helper module for caching infrequently changing resources 
include Cacheable @@ -28,7 +41,10 @@ class Provider < ApplicationRecord default_style: :medium, default_url: "/images/members/default.png" - validates_attachment :logo, content_type: { content_type: ["image/jpg", "image/jpeg", "image/png"] } + validates_attachment :logo, + content_type: { + content_type: %w[image/jpg image/jpeg image/png], + } # define table and attribute names # uid is used as unique identifier, mapped to id in serializer @@ -40,21 +56,77 @@ class Provider < ApplicationRecord validates_presence_of :symbol, :name, :display_name, :system_email validates_uniqueness_of :symbol, message: "This name has already been taken" - validates_format_of :symbol, with: /\A([A-Z]+)\Z/, message: "should only contain capital letters" + validates_format_of :symbol, + with: /\A([A-Z]+)\Z/, + message: "should only contain capital letters" validates_length_of :symbol, minimum: 2, maximum: 8 - validates_format_of :system_email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, message: "system_email should be an email" - validates_format_of :group_email, with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, if: :group_email?, message: "group_email should be an email" - validates_format_of :website, with: /https?:\/\/[\S]+/, if: :website?, message: "Website should be a url" - validates_format_of :salesforce_id, with: /[a-zA-Z0-9]{18}/, message: "wrong format for salesforce id", if: :salesforce_id? - validates_inclusion_of :role_name, in: %w(ROLE_FOR_PROFIT_PROVIDER ROLE_CONTRACTUAL_PROVIDER ROLE_CONSORTIUM ROLE_CONSORTIUM_ORGANIZATION ROLE_ALLOCATOR ROLE_MEMBER ROLE_REGISTRATION_AGENCY ROLE_ADMIN ROLE_DEV), message: "Role %s is not included in the list" - validates_inclusion_of :organization_type, in: %w(researchInstitution academicInstitution governmentAgency nationalInstitution professionalSociety publisher serviceProvider other), message: "organization type %s is not included in the list", if: :organization_type? 
- validates_inclusion_of :non_profit_status, in: %w(non-profit for-profit), message: "non-profit status '%s' is not included in the list" - validates_inclusion_of :focus_area, in: %w(naturalSciences engineeringAndTechnology medicalAndHealthSciences agriculturalSciences socialSciences humanities general), message: "focus area %s is not included in the list", if: :focus_area? + validates_format_of :system_email, + with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, + message: "system_email should be an email" + validates_format_of :group_email, + with: /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, + if: :group_email?, + message: "group_email should be an email" + validates_format_of :website, + with: %r{https?://\S+}, + if: :website?, + message: "Website should be a url" + validates_format_of :salesforce_id, + with: /[a-zA-Z0-9]{18}/, + message: "wrong format for salesforce id", + if: :salesforce_id? + validates_inclusion_of :role_name, + in: %w[ + ROLE_FOR_PROFIT_PROVIDER + ROLE_CONTRACTUAL_PROVIDER + ROLE_CONSORTIUM + ROLE_CONSORTIUM_ORGANIZATION + ROLE_ALLOCATOR + ROLE_MEMBER + ROLE_REGISTRATION_AGENCY + ROLE_ADMIN + ROLE_DEV + ], + message: "Role %s is not included in the list" + validates_inclusion_of :organization_type, + in: %w[ + researchInstitution + academicInstitution + governmentAgency + nationalInstitution + professionalSociety + publisher + serviceProvider + other + ], + message: + "organization type %s is not included in the list", + if: :organization_type? + validates_inclusion_of :non_profit_status, + in: %w[non-profit for-profit], + message: + "non-profit status '%s' is not included in the list" + validates_inclusion_of :focus_area, + in: %w[ + naturalSciences + engineeringAndTechnology + medicalAndHealthSciences + agriculturalSciences + socialSciences + humanities + general + ], + message: "focus area %s is not included in the list", + if: :focus_area? 
validate :freeze_symbol, on: :update validate :can_be_in_consortium validate :uuid_format, if: :globus_uuid? - validates_format_of :ror_id, with: /\Ahttps:\/\/ror\.org\/0\w{6}\d{2}\z/, if: :ror_id?, message: "ROR ID should be a url" - validates_format_of :twitter_handle, with: /\A@[a-zA-Z0-9_]{1,15}\z/, if: :twitter_handle? + validates_format_of :ror_id, + with: %r{\Ahttps://ror\.org/0\w{6}\d{2}\z}, + if: :ror_id?, + message: "ROR ID should be a url" + validates_format_of :twitter_handle, + with: /\A@[a-zA-Z0-9_]{1,15}\z/, if: :twitter_handle? validates_attachment_content_type :logo, content_type: /\Aimage/ @@ -71,8 +143,16 @@ class Provider < ApplicationRecord has_many :dois, through: :clients has_many :provider_prefixes, dependent: :destroy has_many :prefixes, through: :provider_prefixes - has_many :consortium_organizations, class_name: "Provider", primary_key: "symbol", foreign_key: "consortium_id", inverse_of: :consortium - belongs_to :consortium, class_name: "Provider", primary_key: "symbol", foreign_key: "consortium_id", inverse_of: :consortium_organizations, optional: true + has_many :consortium_organizations, + class_name: "Provider", + primary_key: "symbol", + foreign_key: "consortium_id", + inverse_of: :consortium + belongs_to :consortium, + class_name: "Provider", + primary_key: "symbol", + inverse_of: :consortium_organizations, + optional: true has_many :activities, as: :auditable, dependent: :destroy before_validation :set_region, :set_defaults @@ -93,93 +173,144 @@ class Provider < ApplicationRecord settings index: { analysis: { analyzer: { - string_lowercase: { tokenizer: "keyword", filter: %w(lowercase ascii_folding) }, + string_lowercase: { + tokenizer: "keyword", filter: %w[lowercase ascii_folding] + }, }, normalizer: { - keyword_lowercase: { type: "custom", filter: %w(lowercase) }, + keyword_lowercase: { type: "custom", filter: %w[lowercase] }, + }, + filter: { + ascii_folding: { + type: "asciifolding", preserve_original: true + }, }, - filter: { 
ascii_folding: { type: "asciifolding", preserve_original: true } }, }, } do mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :uid, type: :keyword, normalizer: "keyword_lowercase" - indexes :symbol, type: :keyword - indexes :globus_uuid, type: :keyword - indexes :client_ids, type: :keyword - indexes :prefix_ids, type: :keyword - indexes :name, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } - indexes :display_name, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } - indexes :system_email, type: :text, fields: { keyword: { type: "keyword" } } - indexes :group_email, type: :text, fields: { keyword: { type: "keyword" } } - indexes :version, type: :integer - indexes :is_active, type: :keyword - indexes :year, type: :integer - indexes :description, type: :text - indexes :website, type: :text, fields: { keyword: { type: "keyword" } } - indexes :logo_url, type: :text - indexes :image, type: :text - indexes :region, type: :keyword - indexes :focus_area, type: :keyword + indexes :id, type: :keyword + indexes :uid, type: :keyword, normalizer: "keyword_lowercase" + indexes :symbol, type: :keyword + indexes :globus_uuid, type: :keyword + indexes :client_ids, type: :keyword + indexes :prefix_ids, type: :keyword + indexes :name, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + } + indexes :display_name, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + } + indexes :system_email, + type: :text, fields: { keyword: { type: "keyword" } } + indexes :group_email, + type: :text, fields: { keyword: { type: "keyword" } } + indexes :version, type: :integer + indexes :is_active, type: :keyword + indexes :year, type: :integer + 
indexes :description, type: :text + indexes :website, type: :text, fields: { keyword: { type: "keyword" } } + indexes :logo_url, type: :text + indexes :image, type: :text + indexes :region, type: :keyword + indexes :focus_area, type: :keyword indexes :organization_type, type: :keyword indexes :member_type, type: :keyword indexes :non_profit_status, type: :keyword - indexes :consortium_id, type: :text, fields: { keyword: { type: "keyword" }, raw: { type: "text", "analyzer": "string_lowercase", "fielddata": true } } + indexes :consortium_id, + type: :text, + fields: { + keyword: { type: "keyword" }, + raw: { + type: "text", + "analyzer": "string_lowercase", + "fielddata": true, + }, + } indexes :consortium_organization_ids, type: :keyword - indexes :country_code, type: :keyword - indexes :role_name, type: :keyword - indexes :cache_key, type: :keyword - indexes :joined, type: :date + indexes :country_code, type: :keyword + indexes :role_name, type: :keyword + indexes :cache_key, type: :keyword + indexes :joined, type: :date indexes :twitter_handle, type: :keyword - indexes :ror_id, type: :keyword + indexes :ror_id, type: :keyword indexes :salesforce_id, type: :keyword - indexes :billing_information, type: :object, properties: { - postCode: { type: :keyword }, - state: { type: :text }, - organization: { type: :text }, - department: { type: :text }, - city: { type: :text }, - country: { type: :keyword }, - address: { type: :text }, - } - indexes :technical_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :secondary_technical_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :billing_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :secondary_billing_contact, type: :object, properties: { - email: { 
type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :service_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :secondary_service_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :voting_contact, type: :object, properties: { - email: { type: :text }, - given_name: { type: :text }, - family_name: { type: :text }, - } - indexes :created, type: :date - indexes :updated, type: :date - indexes :deleted_at, type: :date + indexes :billing_information, + type: :object, + properties: { + postCode: { type: :keyword }, + state: { type: :text }, + organization: { type: :text }, + department: { type: :text }, + city: { type: :text }, + country: { type: :keyword }, + address: { type: :text }, + } + indexes :technical_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :secondary_technical_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :billing_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :secondary_billing_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :service_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :secondary_service_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + family_name: { type: :text }, + } + indexes :voting_contact, + type: :object, + properties: { + email: { type: :text }, + given_name: { type: :text }, + 
family_name: { type: :text }, + } + indexes :created, type: :date + indexes :updated, type: :date + indexes :deleted_at, type: :date indexes :cumulative_years, type: :integer, index: "false" indexes :consortium, type: :object @@ -211,7 +342,8 @@ def as_indexed_json(options = {}) "member_type" => member_type, "non_profit_status" => non_profit_status, "consortium_id" => consortium_id, - "consortium_organization_ids" => options[:exclude_associations] ? nil : consortium_organization_ids, + "consortium_organization_ids" => + options[:exclude_associations] ? nil : consortium_organization_ids, "role_name" => role_name, "password" => password, "cache_key" => cache_key, @@ -245,19 +377,42 @@ def as_indexed_json(options = {}) end def self.query_fields - ["uid^10", "symbol^10", "name^5", "system_email^5", "group_email^5", "_all"] + %w[uid^10 symbol^10 name^5 system_email^5 group_email^5 _all] end def self.query_aggregations { - years: { date_histogram: { field: "created", interval: "year", format: "year", order: { _key: "desc" }, min_doc_count: 1 }, - aggs: { bucket_truncate: { bucket_sort: { size: 10 } } } }, - cumulative_years: { terms: { field: "cumulative_years", size: 20, min_doc_count: 1, order: { _count: "asc" } } }, + years: { + date_histogram: { + field: "created", + interval: "year", + format: "year", + order: { _key: "desc" }, + min_doc_count: 1, + }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, + }, + cumulative_years: { + terms: { + field: "cumulative_years", + size: 20, + min_doc_count: 1, + order: { _count: "asc" }, + }, + }, regions: { terms: { field: "region", size: 10, min_doc_count: 1 } }, - member_types: { terms: { field: "member_type", size: 10, min_doc_count: 1 } }, - organization_types: { terms: { field: "organization_type", size: 10, min_doc_count: 1 } }, - focus_areas: { terms: { field: "focus_area", size: 10, min_doc_count: 1 } }, - non_profit_statuses: { terms: { field: "non_profit_status", size: 10, min_doc_count: 1 } }, + 
member_types: { + terms: { field: "member_type", size: 10, min_doc_count: 1 }, + }, + organization_types: { + terms: { field: "organization_type", size: 10, min_doc_count: 1 }, + }, + focus_areas: { + terms: { field: "focus_area", size: 10, min_doc_count: 1 }, + }, + non_profit_statuses: { + terms: { field: "non_profit_status", size: 10, min_doc_count: 1 }, + }, } end @@ -267,7 +422,8 @@ def csv provider_id: symbol, consortium_id: consortium.present? ? consortium.symbol : "", salesforce_id: salesforce_id, - consortium_salesforce_id: consortium.present? ? consortium.salesforce_id : "", + consortium_salesforce_id: + consortium.present? ? consortium.salesforce_id : "", role_name: role_name, is_active: is_active == "\x01", description: description, @@ -284,14 +440,18 @@ def csv technical_contact_given_name: technical_contact_given_name, technical_contact_family_name: technical_contact_family_name, secondary_technical_contact_email: secondary_technical_contact_email, - secondary_technical_contact_given_name: secondary_technical_contact_given_name, - secondary_technical_contact_family_name: secondary_technical_contact_family_name, + secondary_technical_contact_given_name: + secondary_technical_contact_given_name, + secondary_technical_contact_family_name: + secondary_technical_contact_family_name, service_contact_email: service_contact_email, service_contact_given_name: service_contact_given_name, service_contact_family_name: service_contact_family_name, secondary_service_contact_email: secondary_service_contact_email, - secondary_service_contact_given_name: secondary_service_contact_given_name, - secondary_service_contact_family_name: secondary_service_contact_family_name, + secondary_service_contact_given_name: + secondary_service_contact_given_name, + secondary_service_contact_family_name: + secondary_service_contact_family_name, voting_contact_email: voting_contact_email, voting_contact_given_name: voting_contact_given_name, voting_contact_family_name: 
voting_contact_family_name, @@ -306,8 +466,10 @@ def csv billing_contact_given_name: billing_contact_given_name, billing_contact_family_name: billing_contact_family_name, secondary_billing_contact_email: secondary_billing_contact_email, - secondary_billing_contact_given_name: secondary_billing_contact_given_name, - secondary_billing_contact_family_name: secondary_billing_contact_family_name, + secondary_billing_contact_given_name: + secondary_billing_contact_given_name, + secondary_billing_contact_family_name: + secondary_billing_contact_family_name, twitter_handle: twitter_handle, ror_id: ror_id, created: created, @@ -323,7 +485,9 @@ def uid end def consortium_organization_ids - consortium_organizations.pluck(:symbol).map(&:downcase) if consortium_organizations.present? + if consortium_organizations.present? + consortium_organizations.pluck(:symbol).map(&:downcase) + end end def cache_key @@ -347,15 +511,21 @@ def technical_contact_family_name end def secondary_technical_contact_email - secondary_technical_contact.fetch("email", nil) if secondary_technical_contact.present? + if secondary_technical_contact.present? + secondary_technical_contact.fetch("email", nil) + end end def secondary_technical_contact_given_name - secondary_technical_contact.fetch("given_name", nil) if secondary_technical_contact.present? + if secondary_technical_contact.present? + secondary_technical_contact.fetch("given_name", nil) + end end def secondary_technical_contact_family_name - secondary_technical_contact.fetch("family_name", nil) if secondary_technical_contact.present? + if secondary_technical_contact.present? + secondary_technical_contact.fetch("family_name", nil) + end end def service_contact_email @@ -371,15 +541,21 @@ def service_contact_family_name end def secondary_service_contact_email - secondary_service_contact.fetch("email", nil) if secondary_service_contact.present? + if secondary_service_contact.present? 
+ secondary_service_contact.fetch("email", nil) + end end def secondary_service_contact_given_name - secondary_service_contact.fetch("given_name", nil) if secondary_service_contact.present? + if secondary_service_contact.present? + secondary_service_contact.fetch("given_name", nil) + end end def secondary_service_contact_family_name - secondary_service_contact.fetch("family_name", nil) if secondary_service_contact.present? + if secondary_service_contact.present? + secondary_service_contact.fetch("family_name", nil) + end end def voting_contact_email @@ -399,7 +575,9 @@ def billing_department end def billing_organization - billing_information.fetch("organization", nil) if billing_information.present? + if billing_information.present? + billing_information.fetch("organization", nil) + end end def billing_address @@ -435,15 +613,21 @@ def billing_contact_family_name end def secondary_billing_contact_email - secondary_billing_contact.fetch("email", nil) if secondary_billing_contact.present? + if secondary_billing_contact.present? + secondary_billing_contact.fetch("email", nil) + end end def secondary_billing_contact_given_name - secondary_billing_contact.fetch("given_name", nil) if secondary_billing_contact.present? + if secondary_billing_contact.present? + secondary_billing_contact.fetch("given_name", nil) + end end def secondary_billing_contact_family_name - secondary_billing_contact.fetch("family_name", nil) if secondary_billing_contact.present? + if secondary_billing_contact.present? + secondary_billing_contact.fetch("family_name", nil) + end end def member_type_label @@ -505,16 +689,12 @@ def country_name end def set_region - r = if country_code.present? - ISO3166::Country[country_code].world_region - end + r = ISO3166::Country[country_code].world_region if country_code.present? 
write_attribute(:region, r) end def regions - { "AMER" => "Americas", - "APAC" => "Asia Pacific", - "EMEA" => "EMEA" } + { "AMER" => "Americas", "APAC" => "Asia Pacific", "EMEA" => "EMEA" } end def region_human_name @@ -539,14 +719,22 @@ def prefix_ids def can_be_in_consortium if consortium_id && member_type != "consortium_organization" - errors.add(:consortium_id, "The provider must be of member_type consortium_organization") + errors.add( + :consortium_id, + "The provider must be of member_type consortium_organization", + ) elsif consortium_id && consortium.member_type != "consortium" - errors.add(:consortium_id, "The consortium must be of member_type consortium") + errors.add( + :consortium_id, + "The consortium must be of member_type consortium", + ) end end def uuid_format - errors.add(:globus_uuid, "#{globus_uuid} is not a valid UUID") unless UUID.validate(globus_uuid) + unless UUID.validate(globus_uuid) + errors.add(:globus_uuid, "#{globus_uuid} is not a valid UUID") + end end def freeze_symbol @@ -579,30 +767,32 @@ def to_jsonapi "deleted_at" => deleted_at ? deleted_at.iso8601 : nil, } - { "id" => symbol.downcase, "type" => "providers", "attributes" => attributes } + { + "id" => symbol.downcase, "type" => "providers", "attributes" => attributes + } end private + def set_region + r = ISO3166::Country[country_code].world_region if country_code.present? + write_attribute(:region, r) + end - def set_region - r = if country_code.present? - ISO3166::Country[country_code].world_region - end - write_attribute(:region, r) - end - - def set_defaults - self.symbol = symbol.upcase if symbol.present? - self.is_active = is_active ? "\x01" : "\x00" - self.version = version.present? ? version + 1 : 0 - self.role_name = "ROLE_ALLOCATOR" if role_name.blank? - self.doi_quota_used = 0 unless doi_quota_used.to_i > 0 - self.doi_quota_allowed = -1 unless doi_quota_allowed.to_i > 0 - self.billing_information = {} if billing_information.blank? 
- self.consortium_id = nil unless member_type == "consortium_organization" - self.non_profit_status = "non-profit" if non_profit_status.blank? - - # custom filename for attachment as data URLs don't support filenames - self.logo_file_name = symbol.downcase + "." + logo_content_type.split("/").last if logo_content_type.present? - end + def set_defaults + self.symbol = symbol.upcase if symbol.present? + self.is_active = is_active ? "\x01" : "\x00" + self.version = version.present? ? version + 1 : 0 + self.role_name = "ROLE_ALLOCATOR" if role_name.blank? + self.doi_quota_used = 0 unless doi_quota_used.to_i > 0 + self.doi_quota_allowed = -1 unless doi_quota_allowed.to_i > 0 + self.billing_information = {} if billing_information.blank? + self.consortium_id = nil unless member_type == "consortium_organization" + self.non_profit_status = "non-profit" if non_profit_status.blank? + + # custom filename for attachment as data URLs don't support filenames + if logo_content_type.present? + self.logo_file_name = + symbol.downcase + "." 
+ logo_content_type.split("/").last + end + end end diff --git a/app/models/provider_prefix.rb b/app/models/provider_prefix.rb index 7c54295b2..e7db8035c 100644 --- a/app/models/provider_prefix.rb +++ b/app/models/provider_prefix.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ProviderPrefix < ApplicationRecord # include helper module for caching infrequently changing resources include Cacheable @@ -26,31 +28,33 @@ class ProviderPrefix < ApplicationRecord end mapping dynamic: "false" do - indexes :id, type: :keyword - indexes :uid, type: :keyword - indexes :state, type: :keyword - indexes :provider_id, type: :keyword + indexes :id, type: :keyword + indexes :uid, type: :keyword + indexes :state, type: :keyword + indexes :provider_id, type: :keyword indexes :provider_id_and_name, type: :keyword - indexes :consortium_id, type: :keyword - indexes :prefix_id, type: :keyword - indexes :client_ids, type: :keyword + indexes :consortium_id, type: :keyword + indexes :prefix_id, type: :keyword + indexes :client_ids, type: :keyword indexes :client_prefix_ids, type: :keyword - indexes :created_at, type: :date - indexes :updated_at, type: :date + indexes :created_at, type: :date + indexes :updated_at, type: :date # index associations - indexes :provider, type: :object - indexes :prefix, type: :object, properties: { - id: { type: :keyword }, - uid: { type: :keyword }, - provider_ids: { type: :keyword }, - client_ids: { type: :keyword }, - state: { type: :keyword }, - prefix: { type: :text }, - created_at: { type: :date }, - } - indexes :clients, type: :object - indexes :client_prefixes, type: :object + indexes :provider, type: :object + indexes :prefix, + type: :object, + properties: { + id: { type: :keyword }, + uid: { type: :keyword }, + provider_ids: { type: :keyword }, + client_ids: { type: :keyword }, + state: { type: :keyword }, + prefix: { type: :text }, + created_at: { type: :date }, + } + indexes :clients, type: :object + indexes :client_prefixes, type: :object 
end def as_indexed_json(options = {}) @@ -67,18 +71,47 @@ def as_indexed_json(options = {}) "created_at" => created_at, "updated_at" => updated_at, "provider" => provider.try(:as_indexed_json, exclude_associations: true), - "prefix" => options[:exclude_associations] ? nil : prefix.try(:as_indexed_json, exclude_associations: true), - "clients" => options[:exclude_associations] ? nil : clients.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, - "client_prefixes" => options[:exclude_associations] ? nil : client_prefixes.map { |m| m.try(:as_indexed_json, exclude_associations: true) }, + "prefix" => + if options[:exclude_associations] + nil + else + prefix.try(:as_indexed_json, exclude_associations: true) + end, + "clients" => + if options[:exclude_associations] + nil + else + clients.map do |m| + m.try(:as_indexed_json, exclude_associations: true) + end + end, + "client_prefixes" => + if options[:exclude_associations] + nil + else + client_prefixes.map do |m| + m.try(:as_indexed_json, exclude_associations: true) + end + end, } end def self.query_aggregations { states: { terms: { field: "state", size: 2, min_doc_count: 1 } }, - years: { date_histogram: { field: "created_at", interval: "year", format: "year", order: { _key: "desc" }, min_doc_count: 1 }, - aggs: { bucket_truncate: { bucket_sort: { size: 10 } } } }, - providers: { terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 } }, + years: { + date_histogram: { + field: "created_at", + interval: "year", + format: "year", + order: { _key: "desc" }, + min_doc_count: 1, + }, + aggs: { bucket_truncate: { bucket_sort: { size: 10 } } }, + }, + providers: { + terms: { field: "provider_id_and_name", size: 10, min_doc_count: 1 }, + }, } end @@ -125,17 +158,12 @@ def client_prefix_ids end def state - if client_prefix_ids.present? - "with-repository" - else - "without-repository" - end + client_prefix_ids.present? ? 
"with-repository" : "without-repository" end private - - # uuid for public id - def set_uid - self.uid = SecureRandom.uuid - end + # uuid for public id + def set_uid + self.uid = SecureRandom.uuid + end end diff --git a/app/models/resource_type.rb b/app/models/resource_type.rb index 618223eeb..6e15ac266 100644 --- a/app/models/resource_type.rb +++ b/app/models/resource_type.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ResourceType include Searchable @@ -30,66 +32,21 @@ def self.debug def self.get_data(_options = {}) [ - { - "id" => "audiovisual", - "title" => "Audiovisual", - }, - { - "id" => "collection", - "title" => "Collection", - }, - { - "id" => "data-paper", - "title" => "DataPaper", - }, - { - "id" => "dataset", - "title" => "Dataset", - }, - { - "id" => "event", - "title" => "Event", - }, - { - "id" => "image", - "title" => "Image", - }, - { - "id" => "interactive-resource", - "title" => "InteractiveResource", - }, - { - "id" => "model", - "title" => "Model", - }, - { - "id" => "physical-object", - "title" => "PhysicalObject", - }, - { - "id" => "service", - "title" => "Service", - }, - { - "id" => "software", - "title" => "Software", - }, - { - "id" => "sound", - "title" => "Sound", - }, - { - "id" => "text", - "title" => "Text", - }, - { - "id" => "workflow", - "title" => "Workflow", - }, - { - "id" => "other", - "title" => "Other", - }, + { "id" => "audiovisual", "title" => "Audiovisual" }, + { "id" => "collection", "title" => "Collection" }, + { "id" => "data-paper", "title" => "DataPaper" }, + { "id" => "dataset", "title" => "Dataset" }, + { "id" => "event", "title" => "Event" }, + { "id" => "image", "title" => "Image" }, + { "id" => "interactive-resource", "title" => "InteractiveResource" }, + { "id" => "model", "title" => "Model" }, + { "id" => "physical-object", "title" => "PhysicalObject" }, + { "id" => "service", "title" => "Service" }, + { "id" => "software", "title" => "Software" }, + { "id" => "sound", "title" => "Sound" }, + { 
"id" => "text", "title" => "Text" }, + { "id" => "workflow", "title" => "Workflow" }, + { "id" => "other", "title" => "Other" }, ] end @@ -100,10 +57,25 @@ def self.parse_data(items, options = {}) { data: parse_item(item) } else - items = items.select { |i| (i.fetch("title", "").downcase + i.fetch("description", "").downcase).include?(options[:query]) } if options[:query] + if options[:query] + items = + items.select do |i| + ( + i.fetch("title", "").downcase + + i.fetch("description", "").downcase + ). + include?(options[:query]) + end + end page = (options.dig(:page, :number) || 1).to_i - per_page = options.dig(:page, :size) && (1..1000).cover?(options.dig(:page, :size).to_i) ? options.dig(:page, :size).to_i : 25 + per_page = + if options.dig(:page, :size) && + (1..1_000).cover?(options.dig(:page, :size).to_i) + options.dig(:page, :size).to_i + else + 25 + end total_pages = (items.length.to_f / per_page).ceil meta = { total: items.length, "total-pages" => total_pages, page: page } diff --git a/app/models/usage_report.rb b/app/models/usage_report.rb index 46509478a..e9dc35546 100644 --- a/app/models/usage_report.rb +++ b/app/models/usage_report.rb @@ -6,7 +6,12 @@ class UsageReport def self.find_by_id(id) ids = id.split(",") - base_url = Rails.env.production? ? "https://api.datacite.org/reports" : "https://api.test.datacite.org/reports" + base_url = + if Rails.env.production? + "https://api.datacite.org/reports" + else + "https://api.test.datacite.org/reports" + end return {} unless id.starts_with?(base_url) url = id @@ -27,16 +32,22 @@ def self.query(_query, options = {}) number = (options.dig(:page, :number) || 1).to_i size = (options.dig(:page, :size) || 25).to_i - base_url = Rails.env.production? ? "https://api.datacite.org/reports" : "https://api.test.datacite.org/reports" + base_url = + if Rails.env.production? 
+ "https://api.datacite.org/reports" + else + "https://api.test.datacite.org/reports" + end url = base_url + "?page[size]=#{size}&page[number]=#{number}" response = Maremma.get(url) return {} if response.status != 200 - data = response.body.dig("data", "reports").map do |message| - parse_message(id: base_url + "/#{message['id']}", message: message) - end + data = + response.body.dig("data", "reports").map do |message| + parse_message(id: base_url + "/#{message['id']}", message: message) + end meta = { "total" => response.body.dig("data", "meta", "total") } errors = response.body.fetch("errors", nil) @@ -45,7 +56,8 @@ def self.query(_query, options = {}) def self.parse_message(id: nil, message: nil) reporting_period = { - begin_date: message.dig("report-header", "reporting-period", "begin-date"), + begin_date: + message.dig("report-header", "reporting-period", "begin-date"), end_date: message.dig("report-header", "reporting-period", "end-date"), } diff --git a/app/models/user.rb b/app/models/user.rb index fb54ac034..ada5b456a 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class User # include jwt encode and decode include Authenticable @@ -11,28 +13,53 @@ class User # include helper module for caching infrequently changing resources include Cacheable - attr_accessor :name, :uid, :email, :role_id, :jwt, :password, :provider_id, :client_id, :beta_tester, :has_orcid_token, :errors + attr_accessor :name, + :uid, + :email, + :role_id, + :jwt, + :password, + :provider_id, + :client_id, + :beta_tester, + :has_orcid_token, + :errors def initialize(credentials, options = {}) if credentials.present? && options.fetch(:type, "").casecmp("basic").zero? 
username, password = ::Base64.decode64(credentials).split(":", 2) payload = decode_auth_param(username: username, password: password) - @jwt = encode_token(payload.merge(iat: Time.now.to_i, exp: Time.now.to_i + 3600 * 24 * 30, aud: Rails.env)) + @jwt = + encode_token( + payload.merge( + iat: Time.now.to_i, + exp: Time.now.to_i + 3_600 * 24 * 30, + aud: Rails.env, + ), + ) elsif credentials.present? && options.fetch(:type, "").casecmp("oidc").zero? payload = decode_alb_token(credentials) # globus auth preferred_username looks like 0000-0003-1419-2405@orcid.org # default to role user unless database says otherwise - uid = payload["preferred_username"].present? ? payload["preferred_username"][0..18] : nil + uid = + if payload["preferred_username"].present? + payload["preferred_username"][0..18] + end if uid.present? payload = { - "uid" => uid, - "name" => payload["name"], - "email" => payload["email"], + "uid" => uid, "name" => payload["name"], "email" => payload["email"] } - @jwt = encode_token(payload.merge(iat: Time.now.to_i, exp: Time.now.to_i + 3600 * 24 * 30, aud: Rails.env)) + @jwt = + encode_token( + payload.merge( + iat: Time.now.to_i, + exp: Time.now.to_i + 3_600 * 24 * 30, + aud: Rails.env, + ), + ) end elsif credentials.present? payload = decode_token(credentials) @@ -68,7 +95,7 @@ def is_admin? # Helper method to check for admin or staff user def is_admin_or_staff? - ["staff_admin", "staff_user"].include?(role_id) + %w[staff_admin staff_user].include?(role_id) end # Helper method to check for beta tester @@ -111,19 +138,56 @@ def self.reset(username) "provider_id" => provider_id, }.compact - jwt = encode_token(payload.merge(iat: Time.now.to_i, exp: Time.now.to_i + 3600 * 24, aud: Rails.env)) + jwt = + encode_token( + payload.merge( + iat: Time.now.to_i, exp: Time.now.to_i + 3_600 * 24, aud: Rails.env, + ), + ) url = ENV["BRACCO_URL"] + "?jwt=" + jwt reset_url = ENV["BRACCO_URL"] + "/reset" - if Rails.env.stage? - title = ENV["ES_PREFIX"].present? ? 
"DataCite Fabrica Stage" : "DataCite Fabrica Test" + title = if Rails.env.stage? + if ENV["ES_PREFIX"].present? + "DataCite Fabrica Stage" + else + "DataCite Fabrica Test" + end else - title = "DataCite Fabrica" + "DataCite Fabrica" end subject = "#{title}: Password Reset Request" - account_type = user.class.name == "Provider" ? user.member_type.humanize : user.client_type.humanize - text = User.format_message_text(template: "users/reset.text.erb", title: title, contact_name: user.name, name: user.symbol, url: url, reset_url: reset_url) - html = User.format_message_html(template: "users/reset.html.erb", title: title, contact_name: user.name, name: user.symbol, url: url, reset_url: reset_url) - response = send_email_message(name: user.name, email: user.system_email, subject: subject, text: text, html: html) + account_type = + if user.instance_of?(Provider) + user.member_type.humanize + else + user.client_type.humanize + end + text = + User.format_message_text( + template: "users/reset.text.erb", + title: title, + contact_name: user.name, + name: user.symbol, + url: url, + reset_url: reset_url, + ) + html = + User.format_message_html( + template: "users/reset.html.erb", + title: title, + contact_name: user.name, + name: user.symbol, + url: url, + reset_url: reset_url, + ) + response = + send_email_message( + name: user.name, + email: user.system_email, + subject: subject, + text: text, + html: html, + ) fields = [ { title: "Account ID", value: uid.upcase, short: true }, @@ -133,7 +197,10 @@ def self.reset(username) ] slack_title = subject + (response[:status] == 200 ? " Sent" : " Failed") level = response[:status] == 200 ? 
"good" : "danger" - send_notification_to_slack(nil, title: slack_title, level: level, fields: fields) + send_notification_to_slack( + nil, + title: slack_title, level: level, fields: fields, + ) response end diff --git a/app/serializers/activity_serializer.rb b/app/serializers/activity_serializer.rb index b31cba468..45f4e4093 100644 --- a/app/serializers/activity_serializer.rb +++ b/app/serializers/activity_serializer.rb @@ -1,10 +1,18 @@ +# frozen_string_literal: true + class ActivitySerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower set_type :activities set_id :request_uuid - attributes "prov:wasGeneratedBy", "prov:generatedAtTime", "prov:wasDerivedFrom", "prov:wasAttributedTo", :action, :version, :changes + attributes "prov:wasGeneratedBy", + "prov:generatedAtTime", + "prov:wasDerivedFrom", + "prov:wasAttributedTo", + :action, + :version, + :changes attribute "prov:wasDerivedFrom", &:was_derived_from diff --git a/app/serializers/client_prefix_serializer.rb b/app/serializers/client_prefix_serializer.rb index 5c76c1b16..755242940 100644 --- a/app/serializers/client_prefix_serializer.rb +++ b/app/serializers/client_prefix_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ClientPrefixSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower diff --git a/app/serializers/client_serializer.rb b/app/serializers/client_serializer.rb index 72eade481..4562ecddb 100644 --- a/app/serializers/client_serializer.rb +++ b/app/serializers/client_serializer.rb @@ -1,13 +1,34 @@ +# frozen_string_literal: true + class ClientSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower set_type :clients set_id :uid - attributes :name, :symbol, :year, :contact_email, :globus_uuid, :alternate_name, :description, :language, :client_type, :domains, :re3data, :opendoar, :issn, :url, :salesforce_id, :created, :updated + attributes :name, + :symbol, + :year, + :contact_email, + :globus_uuid, + 
:alternate_name, + :description, + :language, + :client_type, + :domains, + :re3data, + :opendoar, + :issn, + :url, + :salesforce_id, + :created, + :updated belongs_to :provider, record_type: :providers - belongs_to :consortium, record_type: :providers, serializer: ProviderSerializer, if: Proc.new { |client| client.consortium_id } + belongs_to :consortium, + record_type: :providers, + serializer: ProviderSerializer, + if: Proc.new(&:consortium_id) has_many :prefixes, record_type: :prefixes attribute :is_active do |object| @@ -20,15 +41,34 @@ class ClientSerializer attribute :contact_email, &:system_email - attribute :salesforce_id, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_salesforce_id, object) == true }, &:salesforce_id + attribute :salesforce_id, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?(:read_salesforce_id, object) == + true + }, + &:salesforce_id - attribute :globus_uuid, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_billing_information, object) == true }, &:globus_uuid + attribute :globus_uuid, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_billing_information, + object, + ) == + true + }, + &:globus_uuid attribute :re3data do |object| "https://doi.org/#{object.re3data_id}" if object.re3data_id.present? end attribute :opendoar do |object| - "https://v2.sherpa.ac.uk/id/repository/#{object.opendoar_id}" if object.opendoar_id.present? + if object.opendoar_id.present? 
+ "https://v2.sherpa.ac.uk/id/repository/#{object.opendoar_id}" + end end end diff --git a/app/serializers/data_center_serializer.rb b/app/serializers/data_center_serializer.rb index a8ae41988..fd5ed2952 100644 --- a/app/serializers/data_center_serializer.rb +++ b/app/serializers/data_center_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DataCenterSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash @@ -5,7 +7,13 @@ class DataCenterSerializer set_id :uid # don't cache data-centers, as they use the client model - attributes :title, :other_names, :prefixes, :member_id, :year, :created, :updated + attributes :title, + :other_names, + :prefixes, + :member_id, + :year, + :created, + :updated belongs_to :provider, key: :member, record_type: :members, serializer: :Member diff --git a/app/serializers/datacite_doi_serializer.rb b/app/serializers/datacite_doi_serializer.rb index 2e784686b..98897079d 100644 --- a/app/serializers/datacite_doi_serializer.rb +++ b/app/serializers/datacite_doi_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DataciteDoiSerializer include FastJsonapi::ObjectSerializer @@ -6,25 +8,111 @@ class DataciteDoiSerializer set_id :uid # don't cache dois, as works are cached using the doi model - attributes :doi, :prefix, :suffix, :identifiers, :alternate_identifiers, :creators, :titles, :publisher, :container, :publication_year, :subjects, :contributors, :dates, :language, :types, :related_identifiers, :sizes, :formats, :version, :rights_list, :descriptions, :geo_locations, :funding_references, :xml, :url, :content_url, :metadata_version, :schema_version, :source, :is_active, :state, :reason, :landing_page, :view_count, :views_over_time, :download_count, :downloads_over_time, :reference_count, :citation_count, :citations_over_time, :part_count, :part_of_count, :version_count, :version_of_count, :created, :registered, :published, :updated - attributes :prefix, :suffix, :views_over_time, 
:downloads_over_time, :citations_over_time, if: Proc.new { |_object, params| params && params[:detail] } + attributes :doi, + :prefix, + :suffix, + :identifiers, + :alternate_identifiers, + :creators, + :titles, + :publisher, + :container, + :publication_year, + :subjects, + :contributors, + :dates, + :language, + :types, + :related_identifiers, + :sizes, + :formats, + :version, + :rights_list, + :descriptions, + :geo_locations, + :funding_references, + :xml, + :url, + :content_url, + :metadata_version, + :schema_version, + :source, + :is_active, + :state, + :reason, + :landing_page, + :view_count, + :views_over_time, + :download_count, + :downloads_over_time, + :reference_count, + :citation_count, + :citations_over_time, + :part_count, + :part_of_count, + :version_count, + :version_of_count, + :created, + :registered, + :published, + :updated + attributes :prefix, + :suffix, + :views_over_time, + :downloads_over_time, + :citations_over_time, + if: Proc.new { |_object, params| params && params[:detail] } belongs_to :client, record_type: :clients - belongs_to :provider, record_type: :providers, if: Proc.new { |_object, params| params && params[:detail] } - has_many :media, record_type: :media, id_method_name: :uid, if: Proc.new { |_object, params| params && params[:detail] && !params[:is_collection] } - has_many :references, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_references, if: Proc.new { |_object, params| params && params[:detail] } - has_many :citations, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_citations, if: Proc.new { |_object, params| params && params[:detail] } - has_many :parts, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_parts, if: Proc.new { |_object, params| params && params[:detail] } - has_many :part_of, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_part_of, if: Proc.new { |_object, params| 
params && params[:detail] } - has_many :versions, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_versions, if: Proc.new { |_object, params| params && params[:detail] } - has_many :version_of, record_type: :dois, serializer: DataciteDoiSerializer, object_method_name: :indexed_version_of, if: Proc.new { |_object, params| params && params[:detail] } - - attribute :xml, if: Proc.new { |_object, params| params && params[:detail] } do |object| - begin - Base64.strict_encode64(object.xml) if object.xml.present? - rescue ArgumentError - nil - end + belongs_to :provider, + record_type: :providers, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :media, + record_type: :media, + id_method_name: :uid, + if: + Proc.new { |_object, params| + params && params[:detail] && !params[:is_collection] + } + has_many :references, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_references, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :citations, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_citations, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :parts, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_parts, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :part_of, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_part_of, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :versions, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_versions, + if: Proc.new { |_object, params| params && params[:detail] } + has_many :version_of, + record_type: :dois, + serializer: DataciteDoiSerializer, + object_method_name: :indexed_version_of, + if: Proc.new { |_object, params| params && params[:detail] } + + attribute :xml, + if: + 
Proc.new { |_object, params| + params && params[:detail] + } do |object| + Base64.strict_encode64(object.xml) if object.xml.present? + rescue ArgumentError + nil end attribute :doi, &:uid @@ -32,29 +120,29 @@ class DataciteDoiSerializer attribute :creators do |object, params| # Always return an array of creators and affiliations # use new array format only if affiliation param present - Array.wrap(object.creators).map do |c| - c["affiliation"] = Array.wrap(c["affiliation"]).map do |a| - if params[:affiliation] - a - else - a["name"] - end - end.compact + Array.wrap(object.creators). + map do |c| + c["affiliation"] = + Array.wrap(c["affiliation"]).map do |a| + params[:affiliation] ? a : a["name"] + end.compact c end.compact end - attribute :contributors, if: Proc.new { |_object, params| params && params[:composite].blank? } do |object, params| + attribute :contributors, + if: + Proc.new { |_object, params| + params && params[:composite].blank? + } do |object, params| # Always return an array of contributors and affiliations # use new array format only if param present - Array.wrap(object.contributors).map do |c| - c["affiliation"] = Array.wrap(c["affiliation"]).map do |a| - if params[:affiliation] - a - else - a["name"] - end - end.compact + Array.wrap(object.contributors). + map do |c| + c["affiliation"] = + Array.wrap(c["affiliation"]).map do |a| + params[:affiliation] ? a : a["name"] + end.compact c end.compact end @@ -63,25 +151,48 @@ class DataciteDoiSerializer Array.wrap(object.rights_list) end - attribute :funding_references, if: Proc.new { |_object, params| params && params[:composite].blank? } do |object| + attribute :funding_references, + if: + Proc.new { |_object, params| + params && params[:composite].blank? 
+ } do |object| Array.wrap(object.funding_references) end attribute :identifiers do |object| - Array.wrap(object.identifiers).select { |r| [object.doi, object.url].exclude?(r["identifier"]) } + Array.wrap(object.identifiers).select do |r| + [object.doi, object.url].exclude?(r["identifier"]) + end end - attribute :alternate_identifiers, if: Proc.new { |_object, params| params && params[:detail] } do |object| - Array.wrap(object.identifiers).select { |r| [object.doi, object.url].exclude?(r["identifier"]) }.map do |a| - { "alternateIdentifierType" => a["identifierType"], "alternateIdentifier" => a["identifier"] } + attribute :alternate_identifiers, + if: + Proc.new { |_object, params| + params && params[:detail] + } do |object| + Array.wrap(object.identifiers).select do |r| + [object.doi, object.url].exclude?(r["identifier"]) + end.map do |a| + { + "alternateIdentifierType" => a["identifierType"], + "alternateIdentifier" => a["identifier"], + } end.compact end - attribute :related_identifiers, if: Proc.new { |_object, params| params && params[:composite].blank? } do |object| + attribute :related_identifiers, + if: + Proc.new { |_object, params| + params && params[:composite].blank? + } do |object| Array.wrap(object.related_identifiers) end - attribute :geo_locations, if: Proc.new { |_object, params| params && params[:composite].blank? } do |object| + attribute :geo_locations, + if: + Proc.new { |_object, params| + params && params[:composite].blank? + } do |object| Array.wrap(object.geo_locations) end @@ -89,7 +200,11 @@ class DataciteDoiSerializer Array.wrap(object.dates) end - attribute :subjects, if: Proc.new { |_object, params| params && params[:composite].blank? } do |object| + attribute :subjects, + if: + Proc.new { |_object, params| + params && params[:composite].blank? 
+ } do |object| Array.wrap(object.subjects) end @@ -125,5 +240,15 @@ class DataciteDoiSerializer object.is_active.to_s.getbyte(0) == 1 end - attribute :landing_page, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_landing_page_results, object) == true }, &:landing_page + attribute :landing_page, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_landing_page_results, + object, + ) == + true + }, + &:landing_page end diff --git a/app/serializers/download_serializer.rb b/app/serializers/download_serializer.rb index c75aad7fe..fc9866170 100644 --- a/app/serializers/download_serializer.rb +++ b/app/serializers/download_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class DownloadSerializer include FastJsonapi::ObjectSerializer # include BatchLoaderHelper @@ -6,7 +8,16 @@ class DownloadSerializer set_type :events set_id :uuid - attributes :subj_id, :obj_id, :source_id, :relation_type_id, :total, :message_action, :source_token, :license, :occurred_at, :timestamp + attributes :subj_id, + :obj_id, + :source_id, + :relation_type_id, + :total, + :message_action, + :source_token, + :license, + :occurred_at, + :timestamp # has_many :dois, record_type: :dois, serializer: DoiSerializer, id_method_name: :doi do |object| # load_doi(object) diff --git a/app/serializers/event_serializer.rb b/app/serializers/event_serializer.rb index 9da8f95cc..97f5bc970 100644 --- a/app/serializers/event_serializer.rb +++ b/app/serializers/event_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class EventSerializer include FastJsonapi::ObjectSerializer # include BatchLoaderHelper @@ -6,7 +8,19 @@ class EventSerializer set_type :events set_id :uuid - attributes :subj_id, :obj_id, :source_id, :target_doi, :relation_type_id, :source_relation_type_id, :target_relation_type_id, :total, :message_action, :source_token, :license, :occurred_at, :timestamp + attributes 
:subj_id, + :obj_id, + :source_id, + :target_doi, + :relation_type_id, + :source_relation_type_id, + :target_relation_type_id, + :total, + :message_action, + :source_token, + :license, + :occurred_at, + :timestamp attribute :timestamp, &:updated_at diff --git a/app/serializers/media_serializer.rb b/app/serializers/media_serializer.rb index e48d8fefe..84004bfeb 100644 --- a/app/serializers/media_serializer.rb +++ b/app/serializers/media_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class MediaSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower diff --git a/app/serializers/member_serializer.rb b/app/serializers/member_serializer.rb index d4e39496b..08a81149a 100644 --- a/app/serializers/member_serializer.rb +++ b/app/serializers/member_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class MemberSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash @@ -5,7 +7,21 @@ class MemberSerializer set_id :uid # don't cache members, as they use the provider model - attributes :title, :display_title, :description, :member_type, :organization_type, :focus_area, :region, :country, :year, :logo_url, :email, :website, :joined, :created, :updated + attributes :title, + :display_title, + :description, + :member_type, + :organization_type, + :focus_area, + :region, + :country, + :year, + :logo_url, + :email, + :website, + :joined, + :created, + :updated attribute :title, &:name diff --git a/app/serializers/metadata_serializer.rb b/app/serializers/metadata_serializer.rb index 14808c6bd..d8f317627 100644 --- a/app/serializers/metadata_serializer.rb +++ b/app/serializers/metadata_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class MetadataSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower diff --git a/app/serializers/object_serializer.rb b/app/serializers/object_serializer.rb index 264af44c5..9c8f8bbe3 100644 --- a/app/serializers/object_serializer.rb +++ 
b/app/serializers/object_serializer.rb @@ -1,9 +1,22 @@ +# frozen_string_literal: true + class ObjectSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower set_type :objects - attributes :subtype, :name, :author, :publisher, :periodical, :included_in_data_catalog, :version, :date_published, :date_modified, :funder, :proxy_identifiers, :registrant_id + attributes :subtype, + :name, + :author, + :publisher, + :periodical, + :included_in_data_catalog, + :version, + :date_published, + :date_modified, + :funder, + :proxy_identifiers, + :registrant_id attribute :subtype do |object| object["@type"] diff --git a/app/serializers/old_event_serializer.rb b/app/serializers/old_event_serializer.rb index dd0a6908f..acd3118f6 100644 --- a/app/serializers/old_event_serializer.rb +++ b/app/serializers/old_event_serializer.rb @@ -1,10 +1,21 @@ +# frozen_string_literal: true + class OldEventSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash set_type :events set_id :uuid - attributes :subj_id, :obj_id, :source_id, :relation_type_id, :total, :message_action, :source_token, :license, :occurred_at, :timestamp + attributes :subj_id, + :obj_id, + :source_id, + :relation_type_id, + :total, + :message_action, + :source_token, + :license, + :occurred_at, + :timestamp belongs_to :subj, serializer: OldObjectSerializer, record_type: :objects belongs_to :obj, serializer: OldObjectSerializer, record_type: :objects diff --git a/app/serializers/old_object_serializer.rb b/app/serializers/old_object_serializer.rb index 330cc5fd3..66d675549 100644 --- a/app/serializers/old_object_serializer.rb +++ b/app/serializers/old_object_serializer.rb @@ -1,9 +1,21 @@ +# frozen_string_literal: true + class OldObjectSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash set_type :objects - attributes :subtype, :name, :author, :periodical, :volume_number, :issue_number, :pagination, :publisher, :issn, :version, :date_published + attributes :subtype, + 
:name, + :author, + :periodical, + :volume_number, + :issue_number, + :pagination, + :publisher, + :issn, + :version, + :date_published attribute :subtype do |object| object["@type"] diff --git a/app/serializers/prefix_serializer.rb b/app/serializers/prefix_serializer.rb index 62040c851..416691c24 100644 --- a/app/serializers/prefix_serializer.rb +++ b/app/serializers/prefix_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class PrefixSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower diff --git a/app/serializers/provider_prefix_serializer.rb b/app/serializers/provider_prefix_serializer.rb index bbb3b0e5b..078aac52a 100644 --- a/app/serializers/provider_prefix_serializer.rb +++ b/app/serializers/provider_prefix_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ProviderPrefixSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower diff --git a/app/serializers/provider_serializer.rb b/app/serializers/provider_serializer.rb index 1bed86fcd..855da14c5 100644 --- a/app/serializers/provider_serializer.rb +++ b/app/serializers/provider_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ProviderSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower @@ -5,12 +7,48 @@ class ProviderSerializer set_id :uid # cache_options enabled: true, cache_length: 24.hours ### we cannot filter if we cache - attributes :name, :display_name, :symbol, :website, :system_email, :group_email, :globus_uuid, :description, :region, :country, :logo_url, :member_type, :organization_type, :focus_area, :non_profit_status, :is_active, :has_password, :joined, :twitter_handle, :billing_information, :ror_id, :salesforce_id, :technical_contact, :secondary_technical_contact, :billing_contact, :secondary_billing_contact, :service_contact, :secondary_service_contact, :voting_contact, :created, :updated + attributes :name, + :display_name, + :symbol, + :website, + :system_email, + 
:group_email, + :globus_uuid, + :description, + :region, + :country, + :logo_url, + :member_type, + :organization_type, + :focus_area, + :non_profit_status, + :is_active, + :has_password, + :joined, + :twitter_handle, + :billing_information, + :ror_id, + :salesforce_id, + :technical_contact, + :secondary_technical_contact, + :billing_contact, + :secondary_billing_contact, + :service_contact, + :secondary_service_contact, + :voting_contact, + :created, + :updated has_many :clients, record_type: :clients has_many :prefixes, record_type: :prefixes - belongs_to :consortium, record_type: :providers, serializer: ProviderSerializer, if: Proc.new { |provider| provider.consortium_id } - has_many :consortium_organizations, record_type: :providers, serializer: ProviderSerializer, if: Proc.new { |provider| provider.member_type == "consortium" } + belongs_to :consortium, + record_type: :providers, + serializer: ProviderSerializer, + if: Proc.new(&:consortium_id) + has_many :consortium_organizations, + record_type: :providers, + serializer: ProviderSerializer, + if: Proc.new { |provider| provider.member_type == "consortium" } attribute :country, &:country_code @@ -22,42 +60,124 @@ class ProviderSerializer object.password.present? end - attribute :billing_information, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_billing_information, object) == true } do |object| - object.billing_information.present? ? object.billing_information.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + attribute :billing_information, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_billing_information, + object, + ) == + true + } do |object| + if object.billing_information.present? + object.billing_information.transform_keys! 
do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end - attribute :twitter_handle, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_billing_information, object) == true }, &:twitter_handle - - attribute :globus_uuid, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_billing_information, object) == true }, &:globus_uuid + attribute :twitter_handle, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_billing_information, + object, + ) == + true + }, + &:twitter_handle + + attribute :globus_uuid, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_billing_information, + object, + ) == + true + }, + &:globus_uuid # Convert all contacts json models back to json style camelCase attribute :technical_contact do |object| - object.technical_contact.present? ? object.technical_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.technical_contact.present? + object.technical_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :secondary_technical_contact do |object| - object.secondary_technical_contact.present? ? object.secondary_technical_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.secondary_technical_contact.present? + object.secondary_technical_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :billing_contact do |object| - object.billing_contact.present? ? object.billing_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.billing_contact.present? + object.billing_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :secondary_billing_contact do |object| - object.secondary_billing_contact.present? ? 
object.secondary_billing_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.secondary_billing_contact.present? + object.secondary_billing_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :service_contact do |object| - object.service_contact.present? ? object.service_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.service_contact.present? + object.service_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :secondary_service_contact do |object| - object.secondary_service_contact.present? ? object.secondary_service_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.secondary_service_contact.present? + object.secondary_service_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end attribute :voting_contact do |object| - object.voting_contact.present? ? object.voting_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.voting_contact.present? + object.voting_contact.transform_keys! 
{ |key| key.to_s.camelcase(:lower) } + else + {} + end end - attribute :salesforce_id, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_salesforce_id, object) == true }, &:salesforce_id + attribute :salesforce_id, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?(:read_salesforce_id, object) == + true + }, + &:salesforce_id end diff --git a/app/serializers/repository_prefix_serializer.rb b/app/serializers/repository_prefix_serializer.rb index 319e92f73..a4eada978 100644 --- a/app/serializers/repository_prefix_serializer.rb +++ b/app/serializers/repository_prefix_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class RepositoryPrefixSerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower @@ -6,7 +8,8 @@ class RepositoryPrefixSerializer attributes :created_at, :updated_at - belongs_to :repository, object_method_name: :client, id_method_name: :client_id + belongs_to :repository, + object_method_name: :client, id_method_name: :client_id belongs_to :provider belongs_to :provider_prefix belongs_to :prefix diff --git a/app/serializers/repository_serializer.rb b/app/serializers/repository_serializer.rb index f2cdf9f0d..3acc86e77 100644 --- a/app/serializers/repository_serializer.rb +++ b/app/serializers/repository_serializer.rb @@ -1,10 +1,30 @@ +# frozen_string_literal: true + class RepositorySerializer include FastJsonapi::ObjectSerializer set_key_transform :camel_lower set_type :repositories set_id :uid - attributes :name, :symbol, :re3data, :opendoar, :year, :system_email, :globus_uuid, :alternate_name, :description, :client_type, :repository_type, :language, :certificate, :domains, :issn, :url, :salesforce_id, :created, :updated + attributes :name, + :symbol, + :re3data, + :opendoar, + :year, + :system_email, + :globus_uuid, + :alternate_name, + :description, + :client_type, + :repository_type, + :language, + :certificate, + :domains, + 
:issn, + :url, + :salesforce_id, + :created, + :updated belongs_to :provider, record_type: :providers has_many :prefixes, record_type: :prefixes @@ -14,7 +34,9 @@ class RepositorySerializer end attribute :opendoar do |object| - "https://v2.sherpa.ac.uk/id/repository/#{object.opendoar_id}" if object.opendoar_id.present? + if object.opendoar_id.present? + "https://v2.sherpa.ac.uk/id/repository/#{object.opendoar_id}" + end end attribute :is_active do |object| @@ -26,10 +48,33 @@ class RepositorySerializer end attribute :service_contact do |object| - object.service_contact.present? ? object.service_contact.transform_keys! { |key| key.to_s.camelcase(:lower) } : {} + if object.service_contact.present? + object.service_contact.transform_keys! do |key| + key.to_s.camelcase(:lower) + end + else + {} + end end - attribute :globus_uuid, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_billing_information, object) == true }, &:globus_uuid + attribute :globus_uuid, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?( + :read_billing_information, + object, + ) == + true + }, + &:globus_uuid - attribute :salesforce_id, if: Proc.new { |object, params| params[:current_ability] && params[:current_ability].can?(:read_salesforce_id, object) == true }, &:salesforce_id + attribute :salesforce_id, + if: + Proc.new { |object, params| + params[:current_ability] && + params[:current_ability].can?(:read_salesforce_id, object) == + true + }, + &:salesforce_id end diff --git a/app/serializers/resource_type_serializer.rb b/app/serializers/resource_type_serializer.rb index d1920131d..06373fa3b 100644 --- a/app/serializers/resource_type_serializer.rb +++ b/app/serializers/resource_type_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ResourceTypeSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash diff --git a/app/serializers/view_serializer.rb 
b/app/serializers/view_serializer.rb index 1ae22f85d..d02ae0b3a 100644 --- a/app/serializers/view_serializer.rb +++ b/app/serializers/view_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class ViewSerializer include FastJsonapi::ObjectSerializer # include BatchLoaderHelper @@ -6,7 +8,16 @@ class ViewSerializer set_type :events set_id :uuid - attributes :subj_id, :obj_id, :source_id, :relation_type_id, :total, :message_action, :source_token, :license, :occurred_at, :timestamp + attributes :subj_id, + :obj_id, + :source_id, + :relation_type_id, + :total, + :message_action, + :source_token, + :license, + :occurred_at, + :timestamp # has_many :dois, record_type: :dois, serializer: DoiSerializer, id_method_name: :doi do |object| # load_doi(object) diff --git a/app/serializers/work_serializer.rb b/app/serializers/work_serializer.rb index 37ab8a832..143fbbb30 100644 --- a/app/serializers/work_serializer.rb +++ b/app/serializers/work_serializer.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class WorkSerializer include FastJsonapi::ObjectSerializer set_key_transform :dash @@ -5,17 +7,47 @@ class WorkSerializer set_id :identifier cache_options enabled: true, cache_length: 24.hours - attributes :doi, :identifier, :url, :author, :title, :container_title, :description, :resource_type_subtype, :data_center_id, :member_id, :resource_type_id, :version, :license, :schema_version, :results, :related_identifiers, :citation_count, :citations_over_time, :view_count, :views_over_time, :download_count, :downloads_over_time, :published, :registered, :checked, :updated, :media, :xml - - belongs_to :client, key: "data-center", record_type: "data-centers", serializer: :DataCenter + attributes :doi, + :identifier, + :url, + :author, + :title, + :container_title, + :description, + :resource_type_subtype, + :data_center_id, + :member_id, + :resource_type_id, + :version, + :license, + :schema_version, + :results, + :related_identifiers, + :citation_count, + 
:citations_over_time, + :view_count, + :views_over_time, + :download_count, + :downloads_over_time, + :published, + :registered, + :checked, + :updated, + :media, + :xml + + belongs_to :client, + key: "data-center", + record_type: "data-centers", + serializer: :DataCenter belongs_to :provider, key: :member, record_type: :members, serializer: :Member - belongs_to :resource_type, record_type: "resource-types", serializer: :ResourceType + belongs_to :resource_type, + record_type: "resource-types", serializer: :ResourceType attribute :author do |object| Array.wrap(object.creators).map do |c| if c["givenName"].present? || c["familyName"].present? - { "given" => c["givenName"], - "family" => c["familyName"] }.compact + { "given" => c["givenName"], "family" => c["familyName"] }.compact else { "literal" => c["name"] }.presence end diff --git a/app/validators/billing_information_validator.rb b/app/validators/billing_information_validator.rb index f75ba4dc1..1beeab977 100644 --- a/app/validators/billing_information_validator.rb +++ b/app/validators/billing_information_validator.rb @@ -1,11 +1,11 @@ +# frozen_string_literal: true + class BillingInformationValidator < ActiveModel::EachValidator def validate_each(record, attribute, value) # Don't try to validate if we have nothing return if value.blank? - if value["city"].blank? - record.errors[attribute] << "has no city specified" - end + record.errors[attribute] << "has no city specified" if value["city"].blank? if value["state"].blank? record.errors[attribute] << "has no state/province specified" diff --git a/app/validators/contact_validator.rb b/app/validators/contact_validator.rb index 56810ad64..231e79b1a 100644 --- a/app/validators/contact_validator.rb +++ b/app/validators/contact_validator.rb @@ -1,10 +1,13 @@ +# frozen_string_literal: true + class ContactValidator < ActiveModel::EachValidator def validate_each(record, attribute, value) # Don't try to validate if we have nothing return if value.blank? 
# Email validation - unless value["email"].present? && value["email"] =~ /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z/i + unless value["email"].present? && + value["email"] =~ /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\z/i record.errors[attribute] << "has an invalid email" end diff --git a/app/validators/xml_schema_validator.rb b/app/validators/xml_schema_validator.rb index cd6e813c7..c738f5600 100644 --- a/app/validators/xml_schema_validator.rb +++ b/app/validators/xml_schema_validator.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class XmlSchemaValidator < ActiveModel::EachValidator # mapping of DataCite schema properties to database fields def schema_attributes(el) @@ -37,18 +39,29 @@ def validate_each(record, _attribute, value) kernel = get_valid_kernel(record.schema_version) return false if kernel.blank? - if record.new_record? && ["http://datacite.org/schema/kernel-2.1", "http://datacite.org/schema/kernel-2.2"].include?(record.schema_version) - record.errors[:xml] << "Schema #{record.schema_version} is no longer supported" + if record.new_record? && + %w[ + http://datacite.org/schema/kernel-2.1 + http://datacite.org/schema/kernel-2.2 + ].include?(record.schema_version) + record.errors[:xml] << + "Schema #{record.schema_version} is no longer supported" return false end - filepath = Bundler.rubygems.find_name("bolognese").first.full_gem_path + "/resources/#{kernel}/metadata.xsd" - schema = Nokogiri::XML::Schema(open(filepath)) + filepath = + Bundler.rubygems.find_name("bolognese").first.full_gem_path + + "/resources/#{kernel}/metadata.xsd" + schema = Nokogiri::XML.Schema(open(filepath)) - schema.validate(Nokogiri::XML(value, nil, "UTF-8")).reduce({}) do |_sum, error| + schema.validate(Nokogiri.XML(value, nil, "UTF-8")).reduce( + {}, + ) do |_sum, error| location, level, source, text = error.message.split(": ", 4) line, column = location.split(":", 2) - title = text.to_s.strip + " at line #{line}, column #{column}" if line.present? + if line.present? 
+ title = text.to_s.strip + " at line #{line}, column #{column}" + end source = source.split("}").last[0..-2] if line.present? source = schema_attributes(source) if source.present? record.errors[source.to_sym] << title diff --git a/bin/bundle b/bin/bundle index 67efc37fb..5015ba6f8 100755 --- a/bin/bundle +++ b/bin/bundle @@ -1,3 +1,5 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__) load Gem.bin_path("bundler", "bundle") diff --git a/bin/rails b/bin/rails index 36b01a1ce..e1fc2a9ce 100755 --- a/bin/rails +++ b/bin/rails @@ -1,4 +1,6 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + begin load File.expand_path("spring", __dir__) rescue LoadError => e diff --git a/bin/rake b/bin/rake index 660472a03..d4e83f82a 100755 --- a/bin/rake +++ b/bin/rake @@ -1,4 +1,6 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + begin load File.expand_path("spring", __dir__) rescue LoadError => e diff --git a/bin/rspec b/bin/rspec index 7327c1279..55dd70542 100755 --- a/bin/rspec +++ b/bin/rspec @@ -1,4 +1,6 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + begin load File.expand_path("spring", __dir__) rescue LoadError => e diff --git a/bin/setup b/bin/setup index 37be32ecd..e413f40e3 100755 --- a/bin/setup +++ b/bin/setup @@ -1,4 +1,6 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + require "pathname" require "fileutils" include FileUtils diff --git a/bin/spring b/bin/spring index b44ad1a7a..3ad0d069d 100755 --- a/bin/spring +++ b/bin/spring @@ -1,4 +1,5 @@ #!/usr/bin/env ruby +# frozen_string_literal: true # This file loads spring without using Bundler, in order to be fast. # It gets overwritten when you run the `spring binstub` command. 
diff --git a/bin/update b/bin/update index db28935ec..998dedea3 100755 --- a/bin/update +++ b/bin/update @@ -1,4 +1,6 @@ #!/usr/bin/env ruby +# frozen_string_literal: true + require "pathname" require "fileutils" include FileUtils diff --git a/config.ru b/config.ru index 441e6ff0c..bff88d608 100644 --- a/config.ru +++ b/config.ru @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # This file is used by Rack-based servers to start the application. require_relative "config/environment" diff --git a/config/application.rb b/config/application.rb index dd7b0d23a..eb1508d0c 100644 --- a/config/application.rb +++ b/config/application.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require_relative "boot" require "rails" @@ -68,10 +70,14 @@ class Application < Rails::Application config.load_defaults 5.2 # include graphql - config.paths.add Rails.root.join("app", "graphql", "types").to_s, eager_load: true - config.paths.add Rails.root.join("app", "graphql", "mutations").to_s, eager_load: true - config.paths.add Rails.root.join("app", "graphql", "connections").to_s, eager_load: true - config.paths.add Rails.root.join("app", "graphql", "resolvers").to_s, eager_load: true + config.paths.add Rails.root.join("app", "graphql", "types").to_s, + eager_load: true + config.paths.add Rails.root.join("app", "graphql", "mutations").to_s, + eager_load: true + config.paths.add Rails.root.join("app", "graphql", "connections").to_s, + eager_load: true + config.paths.add Rails.root.join("app", "graphql", "resolvers").to_s, + eager_load: true # Settings in config/environments/* take precedence over those specified here. 
# Application configuration should go into files in config/initializers @@ -88,7 +94,9 @@ class Application < Rails::Application # enable datadog tracing here so that we can inject tracing # information into logs Datadog.configure do |c| - c.tracer hostname: "datadog.local", enabled: Rails.env.production?, env: Rails.env + c.tracer hostname: "datadog.local", + enabled: Rails.env.production?, + env: Rails.env c.use :rails, service_name: "client-api" c.use :elasticsearch c.use :active_record, analytics_enabled: false @@ -103,28 +111,32 @@ class Application < Rails::Application config.lograge.enabled = true config.lograge.formatter = Lograge::Formatters::Logstash.new config.lograge.logger = LogStashLogger.new(type: :stdout) - config.logger = config.lograge.logger ## LogStashLogger needs to be pass to rails logger, see roidrage/lograge#26 - config.log_level = ENV["LOG_LEVEL"].to_sym ## Log level in a config level configuration + config.logger = config.lograge.logger ## LogStashLogger needs to be pass to rails logger, see roidrage/lograge#26 + config.log_level = ENV["LOG_LEVEL"].to_sym ## Log level in a config level configuration - config.lograge.ignore_actions = ["HeartbeatController#index", "IndexController#index"] + config.lograge.ignore_actions = %w[ + HeartbeatController#index + IndexController#index + ] config.lograge.ignore_custom = lambda do |event| - event.payload.inspect.length > 100000 + event.payload.inspect.length > 100_000 end config.lograge.base_controller_class = "ActionController::API" config.lograge.custom_options = lambda do |event| # Retrieves trace information for current thread - correlation = Datadog.tracer.active_correlation + correlation = + Datadog.tracer.active_correlation - exceptions = %w(controller action format id) + exceptions = %w[controller action format id] { - # Adds IDs as tags to log output dd: { + # Adds IDs as tags to log output trace_id: correlation.trace_id, span_id: correlation.span_id, }, - ddsource: ["ruby"], + ddsource: 
%w[ruby], params: event.payload[:params].except(*exceptions), uid: event.payload[:uid], } @@ -136,11 +148,26 @@ class Application < Rails::Application # raise error with unpermitted parameters config.action_controller.action_on_unpermitted_parameters = :log - config.action_view.sanitized_allowed_tags = %w(strong em b i code pre sub sup br) + config.action_view.sanitized_allowed_tags = %w[ + strong + em + b + i + code + pre + sub + sup + br + ] config.action_view.sanitized_allowed_attributes = [] # make sure all input is UTF-8 - config.middleware.insert 0, Rack::UTF8Sanitizer, additional_content_types: ["application/vnd.api+json", "application/xml"] + config.middleware.insert 0, + Rack::UTF8Sanitizer, + additional_content_types: %w[ + application/vnd.api+json + application/xml + ] # detect bots and crawlers config.middleware.use Rack::CrawlerDetect @@ -152,26 +179,20 @@ class Application < Rails::Application config.middleware.use BatchLoader::Middleware # set Active Job queueing backend - config.active_job.queue_adapter = if ENV["AWS_REGION"] - :shoryuken - else - :inline - end + config.active_job.queue_adapter = ENV["AWS_REGION"] ? :shoryuken : :inline # use SQS based on environment, use "test" prefix for test system if Rails.env.stage? - config.active_job.queue_name_prefix = ENV["ES_PREFIX"].present? ? "stage" : "test" + config.active_job.queue_name_prefix = + ENV["ES_PREFIX"].present? ? 
"stage" : "test" else config.active_job.queue_name_prefix = Rails.env end - config.generators do |g| - g.fixture_replacement :factory_bot - end + config.generators { |g| g.fixture_replacement :factory_bot } config.paperclip_defaults = { - storage: :filesystem, - url: "/images/members/:filename", + storage: :filesystem, url: "/images/members/:filename" } end end diff --git a/config/boot.rb b/config/boot.rb index d2ebcbca6..9bc4ad5c4 100644 --- a/config/boot.rb +++ b/config/boot.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../Gemfile", __dir__) require "bundler/setup" # Set up gems listed in the Gemfile. diff --git a/config/environment.rb b/config/environment.rb index e035ef4bf..81bcd07bd 100644 --- a/config/environment.rb +++ b/config/environment.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # Load the Rails application. require_relative "application" diff --git a/config/environments/development.rb b/config/environments/development.rb index 72c26f357..e418cc8c5 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. diff --git a/config/environments/production.rb b/config/environments/production.rb index e4c503b08..57da5fd0e 100644 --- a/config/environments/production.rb +++ b/config/environments/production.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. @@ -11,7 +13,7 @@ config.eager_load = true # Full error reports are disabled and caching is turned on. - config.consider_all_requests_local = false + config.consider_all_requests_local = false config.action_controller.perform_caching = true # Attempt to read encrypted secrets from `config/secrets.yml.enc`. 
diff --git a/config/environments/stage.rb b/config/environments/stage.rb index e846e5e0e..0dfa2e72f 100644 --- a/config/environments/stage.rb +++ b/config/environments/stage.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. diff --git a/config/environments/test.rb b/config/environments/test.rb index a556d6ccf..23395fb8c 100644 --- a/config/environments/test.rb +++ b/config/environments/test.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + Rails.application.configure do # Settings specified here will take precedence over those in config/application.rb. @@ -19,7 +21,7 @@ } # Show full error reports and disable caching. - config.consider_all_requests_local = true + config.consider_all_requests_local = true config.action_controller.perform_caching = true config.cache_store = :dalli_store diff --git a/config/initializers/_shoryuken.rb b/config/initializers/_shoryuken.rb index 22f97ad8b..497d1c80d 100644 --- a/config/initializers/_shoryuken.rb +++ b/config/initializers/_shoryuken.rb @@ -8,9 +8,7 @@ class RavenReporter def call(_worker_instance, queue, _sqs_msg, body) tags = { job: body["job_class"], queue: queue } context = { message: body } - Raven.capture(tags: tags, extra: context) do - yield - end + Raven.capture(tags: tags, extra: context) { yield } end end end diff --git a/config/initializers/_token.rb b/config/initializers/_token.rb index 254b5bc88..70ce7ff67 100644 --- a/config/initializers/_token.rb +++ b/config/initializers/_token.rb @@ -1,4 +1,4 @@ # frozen_string_literal: true # generate token for jwt authentication with Profiles service, valid for 12 months -ENV["VOLPINO_TOKEN"] = User.generate_token(exp: 3600 * 30 * 12) +ENV["VOLPINO_TOKEN"] = User.generate_token(exp: 3_600 * 30 * 12) diff --git a/config/initializers/backtrace_silencers.rb b/config/initializers/backtrace_silencers.rb index 59385cdf3..d0f0d3b5d 100644 --- 
a/config/initializers/backtrace_silencers.rb +++ b/config/initializers/backtrace_silencers.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # Be sure to restart your server when you modify this file. # You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. diff --git a/config/initializers/constants.rb b/config/initializers/constants.rb index b7bc5552f..7549814d2 100644 --- a/config/initializers/constants.rb +++ b/config/initializers/constants.rb @@ -2,40 +2,44 @@ class IdentifierError < RuntimeError; end -RESCUABLE_EXCEPTIONS = [CanCan::AccessDenied, - CanCan::AuthorizationNotPerformed, - ActiveModelSerializers::Adapter::JsonApi::Deserialization::InvalidDocument, - JWT::DecodeError, - JWT::VerificationError, - JSON::ParserError, - Nokogiri::XML::SyntaxError, - NoMethodError, - SocketError, - ActionDispatch::Http::Parameters::ParseError, - ActiveRecord::RecordNotUnique, - ActiveRecord::RecordNotFound, - AbstractController::ActionNotFound, - ActionController::BadRequest, - ActionController::UnknownFormat, - ActionController::RoutingError, - ActionController::ParameterMissing, - ActionController::UnpermittedParameters].freeze +RESCUABLE_EXCEPTIONS = [ + CanCan::AccessDenied, + CanCan::AuthorizationNotPerformed, + ActiveModelSerializers::Adapter::JsonApi::Deserialization::InvalidDocument, + JWT::DecodeError, + JWT::VerificationError, + JSON::ParserError, + Nokogiri::XML::SyntaxError, + NoMethodError, + SocketError, + ActionDispatch::Http::Parameters::ParseError, + ActiveRecord::RecordNotUnique, + ActiveRecord::RecordNotFound, + AbstractController::ActionNotFound, + ActionController::BadRequest, + ActionController::UnknownFormat, + ActionController::RoutingError, + ActionController::ParameterMissing, + ActionController::UnpermittedParameters, +].freeze # Format used for DOI validation # The prefix is 10.x where x is 4-5 digits. 
The suffix can be anything, but can"t be left off -DOI_FORMAT = %r(\A10\.\d{4,5}/.+).freeze +DOI_FORMAT = %r{\A10\.\d{4,5}/.+}.freeze # Format used for URL validation -URL_FORMAT = %r(\A(http|https|ftp):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?\z).freeze +URL_FORMAT = %r{\A(http|https|ftp)://[a-z0-9]+([\-.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?/.*)?\z}. + freeze # Form queue options -QUEUE_OPTIONS = ["high", "default", "low"].freeze +QUEUE_OPTIONS = %w[high default low].freeze # Version of ORCID API ORCID_VERSION = "1.2" # ORCID schema -ORCID_SCHEMA = "https://raw.githubusercontent.com/ORCID/ORCID-Source/master/orcid-model/src/main/resources/orcid-message-1.2.xsd" +ORCID_SCHEMA = + "https://raw.githubusercontent.com/ORCID/ORCID-Source/master/orcid-model/src/main/resources/orcid-message-1.2.xsd" # Version of DataCite API DATACITE_VERSION = "4" @@ -70,11 +74,4 @@ class IdentifierError < RuntimeError; end LAST_SCHEMA_VERSION = "http://datacite.org/schema/kernel-4" -METADATA_FORMATS = [ - "schema_org", - "ris", - "bibtex", - "citeproc", - "crossref", - "codemeta", -].freeze +METADATA_FORMATS = %w[schema_org ris bibtex citeproc crossref codemeta].freeze diff --git a/config/initializers/cors.rb b/config/initializers/cors.rb index f00f79b31..6cd00e924 100644 --- a/config/initializers/cors.rb +++ b/config/initializers/cors.rb @@ -7,13 +7,17 @@ # Read more: https://github.com/cyu/rack-cors -Rails.application.config.middleware.insert_before 0, Rack::Cors, debug: true, logger: (-> { Rails.logger }) do +Rails.application.config.middleware.insert_before 0, + Rack::Cors, + debug: true, + logger: + (-> { Rails.logger }) do allow do origins "*" resource "*", headers: :any, - expose: ["X-Credential-Username", "X-Anonymous-Consumer"], + expose: %w[X-Credential-Username X-Anonymous-Consumer], methods: %i[get post put patch delete options head] end end diff --git a/config/initializers/elasticsearch.rb b/config/initializers/elasticsearch.rb index 
3394b620d..633b08a38 100644 --- a/config/initializers/elasticsearch.rb +++ b/config/initializers/elasticsearch.rb @@ -3,20 +3,26 @@ require "faraday_middleware" require "faraday_middleware/aws_sigv4" -if ENV["ES_HOST"] == "elasticsearch.test.datacite.org" || ENV["ES_HOST"] == "elasticsearch.datacite.org" - Elasticsearch::Model.client = Elasticsearch::Client.new( - host: ENV["ES_HOST"], - port: "80", - scheme: "http", - request_timeout: ENV["ES_REQUEST_TIMEOUT"].to_i, - ) do |f| - f.request :aws_sigv4, - credentials: Aws::Credentials.new(ENV["AWS_ACCESS_KEY_ID"], ENV["AWS_SECRET_ACCESS_KEY"]), - service: "es", - region: ENV["AWS_REGION"] +if ENV["ES_HOST"] == "elasticsearch.test.datacite.org" || + ENV["ES_HOST"] == "elasticsearch.datacite.org" + Elasticsearch::Model.client = + Elasticsearch::Client.new( + host: ENV["ES_HOST"], + port: "80", + scheme: "http", + request_timeout: ENV["ES_REQUEST_TIMEOUT"].to_i, + ) do |f| + f.request :aws_sigv4, + credentials: + Aws::Credentials.new( + ENV["AWS_ACCESS_KEY_ID"], + ENV["AWS_SECRET_ACCESS_KEY"], + ), + service: "es", + region: ENV["AWS_REGION"] - f.adapter :excon - end + f.adapter :excon + end else # config = { # host: ENV['ES_HOST'], @@ -24,7 +30,12 @@ # request: { timeout: 30 } # } # } - Elasticsearch::Model.client = Elasticsearch::Client.new(host: ENV["ES_HOST"], port: ENV["ES_PORT"], scheme: ENV["ES_SCHEME"], user: "elastic", password: ENV["ELASTIC_PASSWORD"]) do |f| - f.adapter :excon - end + Elasticsearch::Model.client = + Elasticsearch::Client.new( + host: ENV["ES_HOST"], + port: ENV["ES_PORT"], + scheme: ENV["ES_SCHEME"], + user: "elastic", + password: ENV["ELASTIC_PASSWORD"], + ) { |f| f.adapter :excon } end diff --git a/config/initializers/filter_parameter_logging.rb b/config/initializers/filter_parameter_logging.rb index 7a4f47b4c..2c112eb67 100644 --- a/config/initializers/filter_parameter_logging.rb +++ b/config/initializers/filter_parameter_logging.rb @@ -3,4 +3,4 @@ # Be sure to restart your server when you 
modify this file. # Configure sensitive parameters which will be filtered from the log file. -Rails.application.config.filter_parameters += [:password] +Rails.application.config.filter_parameters += %i[password] diff --git a/config/initializers/flipper.rb b/config/initializers/flipper.rb index 19defab74..3d6e4c73e 100644 --- a/config/initializers/flipper.rb +++ b/config/initializers/flipper.rb @@ -15,7 +15,12 @@ adapter = Flipper::Adapters::Http.new(configuration) unless Rails.env.test? cache = ActiveSupport::Cache::MemCacheStore.new(ENV["MEMCACHE_SERVERS"]) - adapter = Flipper::Adapters::ActiveSupportCacheStore.new(adapter, cache, expires_in: 1.hour) + adapter = + Flipper::Adapters::ActiveSupportCacheStore.new( + adapter, + cache, + expires_in: 1.hour, + ) end flipper = Flipper.new(adapter, instrumenter: ActiveSupport::Notifications) end @@ -23,7 +28,8 @@ if Rails.env.development? require "flipper/instrumentation/log_subscriber" - Flipper::Instrumentation::LogSubscriber.logger = ActiveSupport::Logger.new(STDOUT) + Flipper::Instrumentation::LogSubscriber.logger = + ActiveSupport::Logger.new(STDOUT) end Flipper.register(:staff) do |actor| diff --git a/config/initializers/inflections.rb b/config/initializers/inflections.rb index a13a89e50..a35e5e698 100644 --- a/config/initializers/inflections.rb +++ b/config/initializers/inflections.rb @@ -4,5 +4,5 @@ # are locale specific, and you may define rules for as many different # locales as you wish. 
All of these examples are active by default: ActiveSupport::Inflector.inflections(:en) do |inflect| - inflect.uncountable %w(status heartbeat metadata media random) + inflect.uncountable %w[status heartbeat metadata media random] end diff --git a/config/initializers/mime_types.rb b/config/initializers/mime_types.rb index 34cf7a80d..d4efacfcf 100644 --- a/config/initializers/mime_types.rb +++ b/config/initializers/mime_types.rb @@ -6,19 +6,31 @@ end # re-register some default Mime types -Mime::Type.register "text/html", :html, %w(application/xhtml+xml), %w(xhtml) -Mime::Type.register "text/plain", :text, [], %w(txt) -Mime::Type.register "application/json", :json, %w(text/x-json application/jsonrequest application/vnd.api+json) +Mime::Type.register "text/html", :html, %w[application/xhtml+xml], %w[xhtml] +Mime::Type.register "text/plain", :text, [], %w[txt] +Mime::Type.register "application/json", + :json, + %w[ + text/x-json + application/jsonrequest + application/vnd.api+json + ] Mime::Type.register "text/csv", :csv # Mime types supported by bolognese gem https://github.com/datacite/bolognese Mime::Type.register "application/vnd.crossref.unixref+xml", :crossref Mime::Type.register "application/vnd.crosscite.crosscite+json", :crosscite -Mime::Type.register "application/vnd.datacite.datacite+xml", :datacite, %w(application/x-datacite+xml) +Mime::Type.register "application/vnd.datacite.datacite+xml", + :datacite, + %w[application/x-datacite+xml] Mime::Type.register "application/vnd.datacite.datacite+json", :datacite_json -Mime::Type.register "application/vnd.schemaorg.ld+json", :schema_org, %w(application/ld+json) +Mime::Type.register "application/vnd.schemaorg.ld+json", + :schema_org, + %w[application/ld+json] Mime::Type.register "application/vnd.jats+xml", :jats -Mime::Type.register "application/vnd.citationstyles.csl+json", :citeproc, %w(application/citeproc+json) +Mime::Type.register "application/vnd.citationstyles.csl+json", + :citeproc, + 
%w[application/citeproc+json] Mime::Type.register "application/vnd.codemeta.ld+json", :codemeta Mime::Type.register "application/x-bibtex", :bibtex Mime::Type.register "application/x-research-info-systems", :ris @@ -31,22 +43,20 @@ end ActionController::Renderers.add :citation do |obj, options| - begin - Array.wrap(obj).map do |o| - o.style = options[:style] || "apa" - o.locale = options[:locale] || "en-US" - o.citation - end.join("\n\n") - rescue CSL::ParseError # unknown style and/or location - Array.wrap(obj).map do |o| - o.style = "apa" - o.locale = "en-US" - o.citation - end.join("\n\n") - end + Array.wrap(obj).map do |o| + o.style = options[:style] || "apa" + o.locale = options[:locale] || "en-US" + o.citation + end.join("\n\n") +rescue CSL::ParseError # unknown style and/or location + Array.wrap(obj).map do |o| + o.style = "apa" + o.locale = "en-US" + o.citation + end.join("\n\n") end -%w(datacite_json schema_org crosscite citeproc codemeta).each do |f| +%w[datacite_json schema_org crosscite citeproc codemeta].each do |f| ActionController::Renderers.add f.to_sym do |obj, _options| if obj.is_a?(Array) "[\n" + Array.wrap(obj).map { |o| o.send(f) }.join(",\n") + "\n]" @@ -56,7 +66,7 @@ end end -%w(jats).each do |f| +%w[jats].each do |f| ActionController::Renderers.add f.to_sym do |obj, _options| Array.wrap(obj).map { |o| o.send(f) }.join("\n") end @@ -71,6 +81,5 @@ end ActionController::Renderers.add :csv do |obj, options| - options[:header].to_csv + - Array.wrap(obj).map { |o| o.send("csv") }.join("") + options[:header].to_csv + Array.wrap(obj).map { |o| o.send("csv") }.join("") end diff --git a/config/initializers/new_framework_defaults_5_2.rb b/config/initializers/new_framework_defaults_5_2.rb index 421e5a2a3..96edc78e0 100644 --- a/config/initializers/new_framework_defaults_5_2.rb +++ b/config/initializers/new_framework_defaults_5_2.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # Be sure to restart your server when you modify this file. 
# # This file contains migration options to ease your Rails 5.2 upgrade. diff --git a/config/initializers/paperclip.rb b/config/initializers/paperclip.rb index 14431ced6..dbec38524 100644 --- a/config/initializers/paperclip.rb +++ b/config/initializers/paperclip.rb @@ -1 +1,3 @@ +# frozen_string_literal: true + Paperclip::DataUriAdapter.register diff --git a/config/initializers/sentry.rb b/config/initializers/sentry.rb index 85d1471ad..7ccab62e1 100644 --- a/config/initializers/sentry.rb +++ b/config/initializers/sentry.rb @@ -3,9 +3,15 @@ Raven.configure do |config| config.dsn = ENV["SENTRY_DSN"] config.release = "lupo:" + Lupo::Application::VERSION - config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s) + config.sanitize_fields = + Rails.application.config.filter_parameters.map(&:to_s) # ignore 502, 503 and 504 from Elasticsearch - config.excluded_exceptions += ["Elasticsearch::Transport::Transport::Errors::BadGateway", "Elasticsearch::Transport::Transport::Errors::ServiceUnavailable", "Elasticsearch::Transport::Transport::Errors::GatewayTimeout"] + config.excluded_exceptions += + %w[ + Elasticsearch::Transport::Transport::Errors::BadGateway + Elasticsearch::Transport::Transport::Errors::ServiceUnavailable + Elasticsearch::Transport::Transport::Errors::GatewayTimeout + ] config.logger = Rails.application.config.lograge.logger end diff --git a/config/initializers/turnout.rb b/config/initializers/turnout.rb index 07ec68fc3..c88f5567b 100644 --- a/config/initializers/turnout.rb +++ b/config/initializers/turnout.rb @@ -2,6 +2,7 @@ Turnout.configure do |config| config.default_maintenance_page = Turnout::MaintenancePage::JSON - config.default_allowed_paths = ["^/heartbeat"] - config.default_reason = "The site is temporarily down for maintenance. Please check https://status.datacite.org for more information." + config.default_allowed_paths = %w[^/heartbeat] + config.default_reason = + "The site is temporarily down for maintenance. 
Please check https://status.datacite.org for more information." end diff --git a/config/initializers/wrap_parameters.rb b/config/initializers/wrap_parameters.rb index 2f3c0db47..bbf59be11 100644 --- a/config/initializers/wrap_parameters.rb +++ b/config/initializers/wrap_parameters.rb @@ -6,9 +6,7 @@ # is enabled by default. # Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array. -ActiveSupport.on_load(:action_controller) do - wrap_parameters format: [:json] -end +ActiveSupport.on_load(:action_controller) { wrap_parameters format: %i[json] } # To enable root element in JSON for ActiveRecord objects. # ActiveSupport.on_load(:active_record) do diff --git a/config/routes.rb b/config/routes.rb index 3492c61a6..1de931885 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -1,9 +1,13 @@ +# frozen_string_literal: true + Rails.application.routes.draw do post "/client-api/graphql", to: "graphql#execute" get "/client-api/graphql", to: "index#method_not_allowed" # global options responder -> makes sure OPTION request for CORS endpoints work - match "*path", via: [:options], to: lambda { |_| [204, { "Content-Type" => "text/plain" }] } + match "*path", + via: %i[options], + to: ->(_) { [204, { "Content-Type" => "text/plain" }] } # authentication post "token", to: "sessions#create_token" @@ -15,51 +19,128 @@ post "reset", to: "sessions#reset" # content negotiation via index path - get "/application/vnd.datacite.datacite+xml/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :datacite } - get "/application/vnd.datacite.datacite+json/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :datacite_json } - get "/application/vnd.crosscite.crosscite+json/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :crosscite } - get "/application/vnd.schemaorg.ld+json/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :schema_org } - get "/application/ld+json/:id", to: 
"index#show", constraints: { id: /.+/ }, defaults: { format: :schema_org } - get "/application/vnd.codemeta.ld+json/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :codemeta } - get "/application/vnd.citationstyles.csl+json/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :citeproc } - get "/application/vnd.jats+xml/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :jats } - get "/application/x-bibtex/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :bibtex } - get "/application/x-research-info-systems/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :ris } - get "/text/csv/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :csv } - get "/text/x-bibliography/:id", to: "index#show", constraints: { id: /.+/ }, defaults: { format: :citation } + get "/application/vnd.datacite.datacite+xml/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :datacite } + get "/application/vnd.datacite.datacite+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :datacite_json } + get "/application/vnd.crosscite.crosscite+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :crosscite } + get "/application/vnd.schemaorg.ld+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :schema_org } + get "/application/ld+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :schema_org } + get "/application/vnd.codemeta.ld+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :codemeta } + get "/application/vnd.citationstyles.csl+json/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :citeproc } + get "/application/vnd.jats+xml/:id", + to: "index#show", constraints: { id: /.+/ }, defaults: { format: :jats } + get "/application/x-bibtex/:id", + to: "index#show", constraints: { 
id: /.+/ }, defaults: { format: :bibtex } + get "/application/x-research-info-systems/:id", + to: "index#show", constraints: { id: /.+/ }, defaults: { format: :ris } + get "/text/csv/:id", + to: "index#show", constraints: { id: /.+/ }, defaults: { format: :csv } + get "/text/x-bibliography/:id", + to: "index#show", + constraints: { id: /.+/ }, + defaults: { format: :citation } # content negotiation - get "/dois/application/vnd.datacite.datacite+xml/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :datacite } - get "/dois/application/vnd.datacite.datacite+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :datacite_json } - get "/dois/application/vnd.crosscite.crosscite+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :crosscite } - get "/dois/application/vnd.schemaorg.ld+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :schema_org } - get "/dois/application/ld+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :schema_org } - get "/dois/application/vnd.codemeta.ld+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :codemeta } - get "/dois/application/vnd.citationstyles.csl+json/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :citeproc } - get "/dois/application/vnd.jats+xml/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :jats } - get "/dois/application/x-bibtex/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :bibtex } - get "/dois/application/x-research-info-systems/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :ris } - get "/dois/text/csv/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :csv } - get "/dois/text/x-bibliography/:id", to: "datacite_dois#show", constraints: { id: /.+/ }, defaults: { format: :citation } 
+ get "/dois/application/vnd.datacite.datacite+xml/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :datacite } + get "/dois/application/vnd.datacite.datacite+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :datacite_json } + get "/dois/application/vnd.crosscite.crosscite+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :crosscite } + get "/dois/application/vnd.schemaorg.ld+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :schema_org } + get "/dois/application/ld+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :schema_org } + get "/dois/application/vnd.codemeta.ld+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :codemeta } + get "/dois/application/vnd.citationstyles.csl+json/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :citeproc } + get "/dois/application/vnd.jats+xml/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :jats } + get "/dois/application/x-bibtex/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :bibtex } + get "/dois/application/x-research-info-systems/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :ris } + get "/dois/text/csv/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :csv } + get "/dois/text/x-bibliography/:id", + to: "datacite_dois#show", + constraints: { id: /.+/ }, + defaults: { format: :citation } # content negotiation for collections - get "/dois/application/vnd.datacite.datacite+xml", to: "datacite_dois#index", defaults: { format: :datacite } - get "/dois/application/vnd.datacite.datacite+json", to: "datacite_dois#index", defaults: { format: :datacite_json } - get "/dois/application/vnd.crosscite.crosscite+json", to: 
"datacite_dois#index", defaults: { format: :crosscite } - get "/dois/application/vnd.schemaorg.ld+json", to: "datacite_dois#index", defaults: { format: :schema_org } - get "/dois/application/ld+json", to: "datacite_dois#index", defaults: { format: :schema_org } - get "/dois/application/vnd.codemeta.ld+json", to: "datacite_dois#index", defaults: { format: :codemeta } - get "/dois/application/vnd.citationstyles.csl+json", to: "datacite_dois#index", defaults: { format: :citeproc } - get "/dois/application/vnd.jats+xml", to: "datacite_dois#index", defaults: { format: :jats } - get "/dois/application/x-bibtex", to: "datacite_dois#index", defaults: { format: :bibtex } - get "/dois/application/x-research-info-systems", to: "datacite_dois#index", defaults: { format: :ris } + get "/dois/application/vnd.datacite.datacite+xml", + to: "datacite_dois#index", defaults: { format: :datacite } + get "/dois/application/vnd.datacite.datacite+json", + to: "datacite_dois#index", defaults: { format: :datacite_json } + get "/dois/application/vnd.crosscite.crosscite+json", + to: "datacite_dois#index", defaults: { format: :crosscite } + get "/dois/application/vnd.schemaorg.ld+json", + to: "datacite_dois#index", defaults: { format: :schema_org } + get "/dois/application/ld+json", + to: "datacite_dois#index", defaults: { format: :schema_org } + get "/dois/application/vnd.codemeta.ld+json", + to: "datacite_dois#index", defaults: { format: :codemeta } + get "/dois/application/vnd.citationstyles.csl+json", + to: "datacite_dois#index", defaults: { format: :citeproc } + get "/dois/application/vnd.jats+xml", + to: "datacite_dois#index", defaults: { format: :jats } + get "/dois/application/x-bibtex", + to: "datacite_dois#index", defaults: { format: :bibtex } + get "/dois/application/x-research-info-systems", + to: "datacite_dois#index", defaults: { format: :ris } get "/dois/text/csv", to: "datacite_dois#index", defaults: { format: :csv } - get "/dois/text/x-bibliography", to: "datacite_dois#index", 
defaults: { format: :citation } + get "/dois/text/x-bibliography", + to: "datacite_dois#index", defaults: { format: :citation } get "/providers/text/csv", to: "providers#index", defaults: { format: :csv } get "providers/random", to: "providers#random" get "repositories/random", to: "repositories#random" - get "/organizations/text/csv", to: "organizations#index", defaults: { format: :csv } - get "/repositories/text/csv", to: "repositories#index", defaults: { format: :csv } + get "/organizations/text/csv", + to: "organizations#index", defaults: { format: :csv } + get "/repositories/text/csv", + to: "repositories#index", defaults: { format: :csv } # manage DOIs post "dois/validate", to: "datacite_dois#validate" @@ -79,14 +160,17 @@ get "prefixes/totals", to: "prefixes#totals" get "/providers/:id/stats", to: "providers#stats" get "/clients/:id/stats", to: "clients#stats", constraints: { id: /.+/ } - get "/repositories/:id/stats", to: "repositories#stats", constraints: { id: /.+/ } + get "/repositories/:id/stats", + to: "repositories#stats", constraints: { id: /.+/ } # Reporting - get "export/organizations", to: "exports#organizations", defaults: { format: :csv } - get "export/repositories", to: "exports#repositories", defaults: { format: :csv } + get "export/organizations", + to: "exports#organizations", defaults: { format: :csv } + get "export/repositories", + to: "exports#repositories", defaults: { format: :csv } get "export/contacts", to: "exports#contacts", defaults: { format: :csv } - resources :heartbeat, only: [:index] + resources :heartbeat, only: %i[index] resources :activities, only: %i[index show] @@ -119,7 +203,7 @@ resources :prefixes, constraints: { id: /.+/ } resources :provider_prefixes, path: "provider-prefixes" - resources :random, only: [:index] + resources :random, only: %i[index] resources :providers do resources :clients, constraints: { id: /.+/ }, shallow: true @@ -138,11 +222,17 @@ # support for legacy routes resources :members, only: %i[show 
index] - resources :data_centers, only: %i[show index], constraints: { id: /.+/ }, path: "/data-centers" + resources :data_centers, + only: %i[show index], + constraints: { id: /.+/ }, + path: "/data-centers" resources :works, only: %i[show index], constraints: { id: /.+/ } # content negotiation - resources :index, path: "/", only: %i[show index], constraints: { id: /.+/, format: false } + resources :index, + path: "/", + only: %i[show index], + constraints: { id: /.+/, format: false } root to: "index#index" diff --git a/config/spring.rb b/config/spring.rb index 9fa7863f9..c5933e491 100644 --- a/config/spring.rb +++ b/config/spring.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + %w[ .ruby-version .rbenv-vars diff --git a/db/migrate/20170807091814_create_all_tables.rb b/db/migrate/20170807091814_create_all_tables.rb index 7282e752c..a064c7b46 100644 --- a/db/migrate/20170807091814_create_all_tables.rb +++ b/db/migrate/20170807091814_create_all_tables.rb @@ -2,7 +2,9 @@ class CreateAllTables < ActiveRecord::Migration[5.1] def change - create_table "allocator", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "allocator", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.string "contact_email", null: false t.string "contact_name", limit: 80, null: false t.datetime "created" @@ -15,22 +17,26 @@ def change t.string "symbol", null: false t.datetime "updated" t.integer "version" - t.text "comments", limit: 4294967295 + t.text "comments", limit: 4_294_967_295 t.string "experiments" - t.index ["symbol"], name: "symbol", unique: true + t.index %w[symbol], name: "symbol", unique: true end - create_table "allocator_prefixes", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "allocator_prefixes", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "allocator", limit: 8, null: false t.integer "prefixes", limit: 8, null: false t.datetime "created" 
t.datetime "updated" - t.index ["allocator"], name: "FKE7FBD67446EBD781" - t.index ["prefixes"], name: "FKE7FBD674AF86A1C7" + t.index %w[allocator], name: "FKE7FBD67446EBD781" + t.index %w[prefixes], name: "FKE7FBD674AF86A1C7" end - create_table "datacentre", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| - t.text "comments", limit: 4294967295 + create_table "datacentre", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + t.text "comments", limit: 4_294_967_295 t.string "contact_email", null: false t.string "contact_name", limit: 80, null: false t.datetime "created" @@ -46,20 +52,24 @@ def change t.integer "version" t.integer "allocator", limit: 8, null: false t.string "experiments" - t.index ["allocator"], name: "FK6695D60546EBD781" - t.index ["symbol"], name: "symbol", unique: true + t.index %w[allocator], name: "FK6695D60546EBD781" + t.index %w[symbol], name: "symbol", unique: true end - create_table "datacentre_prefixes", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "datacentre_prefixes", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.integer "datacentre", limit: 8, null: false t.integer "prefixes", limit: 8, null: false t.datetime "created" t.datetime "updated" - t.index ["datacentre"], name: "FK13A1B3BA47B5F5FF" - t.index ["prefixes"], name: "FK13A1B3BAAF86A1C7" + t.index %w[datacentre], name: "FK13A1B3BA47B5F5FF" + t.index %w[prefixes], name: "FK13A1B3BAAF86A1C7" end - create_table "dataset", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "dataset", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.datetime "created" t.string "doi", null: false t.binary "is_active", limit: 1, null: false @@ -71,38 +81,44 @@ def change t.integer "version" t.integer "datacentre", limit: 8, null: false t.datetime "minted" - t.index ["datacentre"], name: "FK5605B47847B5F5FF" - t.index ["doi"], name: 
"doi", unique: true + t.index %w[datacentre], name: "FK5605B47847B5F5FF" + t.index %w[doi], name: "doi", unique: true end - create_table "media", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "media", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.datetime "created" t.string "media_type", limit: 80 t.datetime "updated" t.string "url", null: false t.integer "version" t.integer "dataset", limit: 8, null: false - t.index ["dataset", "updated"], name: "dataset_updated" - t.index ["dataset"], name: "FK62F6FE44D3D6B1B" + t.index %w[dataset updated], name: "dataset_updated" + t.index %w[dataset], name: "FK62F6FE44D3D6B1B" end - create_table "metadata", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "metadata", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.datetime "created" t.integer "metadata_version" t.integer "version" - t.binary "xml", limit: 16777215 + t.binary "xml", limit: 16_777_215 t.integer "dataset", limit: 8, null: false t.binary "is_converted_by_mds", limit: 1 t.string "namespace" - t.index ["dataset", "metadata_version"], name: "dataset_version" - t.index ["dataset"], name: "FKE52D7B2F4D3D6B1B" + t.index %w[dataset metadata_version], name: "dataset_version" + t.index %w[dataset], name: "FKE52D7B2F4D3D6B1B" end - create_table "prefix", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| + create_table "prefix", + force: :cascade, + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t| t.datetime "created" t.string "prefix", limit: 80, null: false t.integer "version" - t.index ["prefix"], name: "prefix", unique: true + t.index %w[prefix], name: "prefix", unique: true end # add_foreign_key "allocator_prefixes", "allocator", column: "allocator", name: "FKE7FBD67446EBD781" diff --git a/db/migrate/20170916141643_add_allocator_prefixes_column.rb b/db/migrate/20170916141643_add_allocator_prefixes_column.rb index 
16034be01..1217ba52d 100644 --- a/db/migrate/20170916141643_add_allocator_prefixes_column.rb +++ b/db/migrate/20170916141643_add_allocator_prefixes_column.rb @@ -3,7 +3,7 @@ class AddAllocatorPrefixesColumn < ActiveRecord::Migration[5.1] def change add_column :datacentre_prefixes, :allocator_prefixes, :integer, limit: 8 - add_index :datacentre_prefixes, [:allocator_prefixes] + add_index :datacentre_prefixes, %i[allocator_prefixes] rename_column :datacentre_prefixes, :created, :created_at rename_column :datacentre_prefixes, :updated, :updated_at rename_column :allocator_prefixes, :created, :created_at diff --git a/db/migrate/20170926083943_add_url_index.rb b/db/migrate/20170926083943_add_url_index.rb index 87a920fcc..b27507213 100644 --- a/db/migrate/20170926083943_add_url_index.rb +++ b/db/migrate/20170926083943_add_url_index.rb @@ -4,6 +4,6 @@ class AddUrlIndex < ActiveRecord::Migration[5.1] def change add_column :dataset, :last_landing_page, :string add_column :dataset, :last_landing_page_content_type, :string - add_index :dataset, [:url] + add_index :dataset, %i[url] end end diff --git a/db/migrate/20170928202815_addre3data_column.rb b/db/migrate/20170928202815_addre3data_column.rb index e51bfc8af..411b764d6 100644 --- a/db/migrate/20170928202815_addre3data_column.rb +++ b/db/migrate/20170928202815_addre3data_column.rb @@ -3,6 +3,6 @@ class Addre3dataColumn < ActiveRecord::Migration[5.1] def change add_column :datacentre, :re3data, :string - add_index :datacentre, [:re3data] + add_index :datacentre, %i[re3data] end end diff --git a/db/migrate/20171109120529_add_aasm_column.rb b/db/migrate/20171109120529_add_aasm_column.rb index 9b7b46fb0..2b2f5bdaf 100644 --- a/db/migrate/20171109120529_add_aasm_column.rb +++ b/db/migrate/20171109120529_add_aasm_column.rb @@ -3,7 +3,7 @@ class AddAasmColumn < ActiveRecord::Migration[5.1] def self.up add_column :dataset, :state, :string, default: "draft" - add_index :dataset, [:state] + add_index :dataset, %i[state] end def 
self.down diff --git a/db/migrate/20171202002420_rename_state_column.rb b/db/migrate/20171202002420_rename_state_column.rb index ed87516e0..4e6501f47 100644 --- a/db/migrate/20171202002420_rename_state_column.rb +++ b/db/migrate/20171202002420_rename_state_column.rb @@ -4,12 +4,12 @@ class RenameStateColumn < ActiveRecord::Migration[5.1] def self.up remove_column :dataset, :state add_column :dataset, :aasm_state, :string - add_index :dataset, [:aasm_state] + add_index :dataset, %i[aasm_state] end def self.down remove_column :dataset, :aasm_state add_column :dataset, :state, :string, default: "draft" - add_index :dataset, [:state] + add_index :dataset, %i[state] end end diff --git a/db/migrate/20171202090754_change_url_column_type.rb b/db/migrate/20171202090754_change_url_column_type.rb index 39a6795b0..5ed743027 100644 --- a/db/migrate/20171202090754_change_url_column_type.rb +++ b/db/migrate/20171202090754_change_url_column_type.rb @@ -3,7 +3,7 @@ class ChangeUrlColumnType < ActiveRecord::Migration[5.1] def up remove_index :dataset, name: "index_dataset_on_url", column: :url - change_column :dataset, :url, :text, limit: 65535 + change_column :dataset, :url, :text, limit: 65_535 add_index :dataset, :url, name: "index_dataset_on_url", length: 100 end diff --git a/db/migrate/20180116230054_add_client_url.rb b/db/migrate/20180116230054_add_client_url.rb index c81987dbc..296268d97 100644 --- a/db/migrate/20180116230054_add_client_url.rb +++ b/db/migrate/20180116230054_add_client_url.rb @@ -2,7 +2,7 @@ class AddClientUrl < ActiveRecord::Migration[5.1] def change - add_column :datacentre, :url, :text, limit: 65535 + add_column :datacentre, :url, :text, limit: 65_535 add_index :datacentre, :url, name: "index_datacentre_on_url", length: 100 end end diff --git a/db/migrate/20180306172317_add_json_column.rb b/db/migrate/20180306172317_add_json_column.rb index 42c59f467..434a0bc7b 100644 --- a/db/migrate/20180306172317_add_json_column.rb +++ 
b/db/migrate/20180306172317_add_json_column.rb @@ -2,6 +2,6 @@ class AddJsonColumn < ActiveRecord::Migration[5.1] def change - add_column :dataset, :crosscite, :text, limit: 16777215 + add_column :dataset, :crosscite, :text, limit: 16_777_215 end end diff --git a/db/migrate/20180310064742_landing_page_url_as_text.rb b/db/migrate/20180310064742_landing_page_url_as_text.rb index 796c5612c..0de5e6307 100644 --- a/db/migrate/20180310064742_landing_page_url_as_text.rb +++ b/db/migrate/20180310064742_landing_page_url_as_text.rb @@ -2,14 +2,22 @@ class LandingPageUrlAsText < ActiveRecord::Migration[5.1] def up - change_column :dataset, :last_landing_page, :text, limit: 65535 - add_index :dataset, :last_landing_page_status, name: "index_dataset_on_last_landing_page_status" - add_index :dataset, :last_landing_page_content_type, name: "index_dataset_on_last_landing_page_content_type" + change_column :dataset, :last_landing_page, :text, limit: 65_535 + add_index :dataset, + :last_landing_page_status, + name: "index_dataset_on_last_landing_page_status" + add_index :dataset, + :last_landing_page_content_type, + name: "index_dataset_on_last_landing_page_content_type" end def down - remove_index :dataset, name: "index_dataset_on_last_landing_page_status", column: :last_landing_page_status - remove_index :dataset, name: "index_dataset_on_last_landing_page_content_type", column: :last_landing_page_content_type + remove_index :dataset, + name: "index_dataset_on_last_landing_page_status", + column: :last_landing_page_status + remove_index :dataset, + name: "index_dataset_on_last_landing_page_content_type", + column: :last_landing_page_content_type change_column :dataset, :last_landing_page, :string end end diff --git a/db/migrate/20180330040550_add_institution_type_column.rb b/db/migrate/20180330040550_add_institution_type_column.rb index 83cc34ed7..5ee11e5c3 100644 --- a/db/migrate/20180330040550_add_institution_type_column.rb +++ 
b/db/migrate/20180330040550_add_institution_type_column.rb @@ -4,10 +4,12 @@ class AddInstitutionTypeColumn < ActiveRecord::Migration[5.1] def up add_column :allocator, :joined, :date remove_column :allocator, :year, :integer - change_column :allocator, :description, :text, limit: 65535 + change_column :allocator, :description, :text, limit: 65_535 add_column :allocator, :institution_type, :string, limit: 191 - add_index :allocator, [:institution_type], name: "index_member_institution_type" + add_index :allocator, + %i[institution_type], + name: "index_member_institution_type" end def down diff --git a/db/migrate/20180330175050_create_active_storage_tables.active_storage.rb b/db/migrate/20180330175050_create_active_storage_tables.active_storage.rb index e0d6a8775..cc3f7ef81 100644 --- a/db/migrate/20180330175050_create_active_storage_tables.active_storage.rb +++ b/db/migrate/20180330175050_create_active_storage_tables.active_storage.rb @@ -6,25 +6,26 @@ def change add_column :allocator, :logo, :string create_table :active_storage_blobs do |t| - t.string :key, null: false, limit: 191 - t.string :filename, null: false, limit: 191 - t.string :content_type, limit: 191 - t.text :metadata - t.bigint :byte_size, null: false - t.string :checksum, null: false, limit: 191 + t.string :key, null: false, limit: 191 + t.string :filename, null: false, limit: 191 + t.string :content_type, limit: 191 + t.text :metadata + t.bigint :byte_size, null: false + t.string :checksum, null: false, limit: 191 t.datetime :created_at, null: false - t.index [:key], unique: true + t.index %i[key], unique: true end create_table :active_storage_attachments do |t| - t.string :name, null: false, limit: 191 - t.references :record, null: false, polymorphic: true, index: false - t.references :blob, null: false + t.string :name, null: false, limit: 191 + t.references :record, null: false, polymorphic: true, index: false + t.references :blob, null: false t.datetime :created_at, null: false - t.index 
%i[record_type record_id name blob_id], name: "index_active_storage_attachments_uniqueness", unique: true + t.index %i[record_type record_id name blob_id], + name: "index_active_storage_attachments_uniqueness", unique: true end end end diff --git a/db/migrate/20180505084805_remove_crosscite_column.rb b/db/migrate/20180505084805_remove_crosscite_column.rb index dbde0c6bf..3ea279821 100644 --- a/db/migrate/20180505084805_remove_crosscite_column.rb +++ b/db/migrate/20180505084805_remove_crosscite_column.rb @@ -3,6 +3,6 @@ class RemoveCrossciteColumn < ActiveRecord::Migration[5.2] def change remove_column :dataset, :from, :string - remove_column :dataset, :crosscite, :text, limit: 16777215 + remove_column :dataset, :crosscite, :text, limit: 16_777_215 end end diff --git a/db/migrate/20180731090122_add_source_column.rb b/db/migrate/20180731090122_add_source_column.rb index 6fedf056b..57861da0f 100644 --- a/db/migrate/20180731090122_add_source_column.rb +++ b/db/migrate/20180731090122_add_source_column.rb @@ -3,6 +3,6 @@ class AddSourceColumn < ActiveRecord::Migration[5.2] def change add_column :dataset, :source, :string, limit: 191 - add_index :dataset, [:source], name: "index_dataset_source" + add_index :dataset, %i[source], name: "index_dataset_source" end end diff --git a/db/migrate/20180904161700_add_last_landing_page_status_result_column.rb b/db/migrate/20180904161700_add_last_landing_page_status_result_column.rb index 69df20d74..ebe8ef274 100644 --- a/db/migrate/20180904161700_add_last_landing_page_status_result_column.rb +++ b/db/migrate/20180904161700_add_last_landing_page_status_result_column.rb @@ -2,6 +2,9 @@ class AddLastLandingPageStatusResultColumn < ActiveRecord::Migration[5.2] def change - add_column :dataset, :last_landing_page_status_result, :json, after: :last_landing_page_status_check + add_column :dataset, + :last_landing_page_status_result, + :json, + after: :last_landing_page_status_check end end diff --git 
a/db/migrate/20181015152049_microseconds_in_time_columns.rb b/db/migrate/20181015152049_microseconds_in_time_columns.rb index 80b2a7a4d..e74b82b13 100644 --- a/db/migrate/20181015152049_microseconds_in_time_columns.rb +++ b/db/migrate/20181015152049_microseconds_in_time_columns.rb @@ -5,12 +5,19 @@ def up change_column :dataset, :created, :datetime, limit: 3 change_column :dataset, :updated, :datetime, limit: 3 - add_column :dataset, :indexed, :datetime, limit: 3, default: "1970-01-01 00:00:00", null: false - add_index "dataset", ["created", "indexed", "updated"], name: "index_dataset_on_created_indexed_updated" + add_column :dataset, + :indexed, + :datetime, + limit: 3, default: "1970-01-01 00:00:00", null: false + add_index "dataset", + %w[created indexed updated], + name: "index_dataset_on_created_indexed_updated" end def down - remove_index :dataset, column: ["created", "indexed", "updated"], name: "index_dataset_on_created_indexed_updated" + remove_index :dataset, + column: %w[created indexed updated], + name: "index_dataset_on_created_indexed_updated" remove_column :dataset, :indexed change_column :dataset, :created, :datetime diff --git a/db/migrate/20181023235649_add_focus_area.rb b/db/migrate/20181023235649_add_focus_area.rb index 66c463e77..2095b314b 100644 --- a/db/migrate/20181023235649_add_focus_area.rb +++ b/db/migrate/20181023235649_add_focus_area.rb @@ -6,12 +6,16 @@ def up add_column :allocator, :focus_area, :string, limit: 191 add_column :allocator, :organization_type, :string, limit: 191 - add_index :allocator, [:organization_type], name: "index_allocator_organization_type" + add_index :allocator, + %i[organization_type], + name: "index_allocator_organization_type" end def down add_column :allocator, :institution_type, :string, limit: 191 - add_index :allocator, [:institution_type], name: "index_member_institution_type" + add_index :allocator, + %i[institution_type], + name: "index_member_institution_type" remove_column :allocator, :focus_area, 
:string, limit: 191 remove_column :allocator, :organization_type, :string, limit: 191 diff --git a/db/migrate/20181102094810_add_schema_attributes.rb b/db/migrate/20181102094810_add_schema_attributes.rb index 042711ab8..3735ca078 100644 --- a/db/migrate/20181102094810_add_schema_attributes.rb +++ b/db/migrate/20181102094810_add_schema_attributes.rb @@ -23,6 +23,6 @@ def change add_column :dataset, :subjects, :json add_column :dataset, :schema_version, :string, limit: 191 add_column :dataset, :content_url, :json - add_column :dataset, :xml, :binary, limit: 16777215 + add_column :dataset, :xml, :binary, limit: 16_777_215 end end diff --git a/db/migrate/20181216071910_schema_version_index.rb b/db/migrate/20181216071910_schema_version_index.rb index e9ad2d987..067280aa9 100644 --- a/db/migrate/20181216071910_schema_version_index.rb +++ b/db/migrate/20181216071910_schema_version_index.rb @@ -2,6 +2,6 @@ class SchemaVersionIndex < ActiveRecord::Migration[5.2] def change - add_index :dataset, [:schema_version] + add_index :dataset, %i[schema_version] end end diff --git a/db/migrate/20190302161113_install_audited.rb b/db/migrate/20190302161113_install_audited.rb index 26089d602..778c4e524 100644 --- a/db/migrate/20190302161113_install_audited.rb +++ b/db/migrate/20190302161113_install_audited.rb @@ -19,8 +19,12 @@ def self.up t.column :created_at, :datetime, limit: 3 end - add_index :audits, %i[auditable_type auditable_id version], name: "auditable_index" - add_index :audits, %i[associated_type associated_id], name: "associated_index" + add_index :audits, + %i[auditable_type auditable_id version], + name: "auditable_index" + add_index :audits, + %i[associated_type associated_id], + name: "associated_index" add_index :audits, %i[user_id user_type], name: "user_index" add_index :audits, :request_uuid add_index :audits, :created_at diff --git a/db/migrate/20190409211358_change_media_url_column_type.rb b/db/migrate/20190409211358_change_media_url_column_type.rb index 
55387b15e..cfd399ad2 100644 --- a/db/migrate/20190409211358_change_media_url_column_type.rb +++ b/db/migrate/20190409211358_change_media_url_column_type.rb @@ -2,7 +2,7 @@ class ChangeMediaUrlColumnType < ActiveRecord::Migration[5.2] def up - change_column :media, :url, :text, limit: 65535 + change_column :media, :url, :text, limit: 65_535 add_index :media, :url, name: "index_media_on_url", length: 100 end diff --git a/db/migrate/20190604093226_add_events_table.rb b/db/migrate/20190604093226_add_events_table.rb index 3f929806e..93d2f64f4 100644 --- a/db/migrate/20190604093226_add_events_table.rb +++ b/db/migrate/20190604093226_add_events_table.rb @@ -2,7 +2,9 @@ class AddEventsTable < ActiveRecord::Migration[5.2] def change - create_table "events", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4", force: :cascade do |t| + create_table "events", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4", + force: :cascade do |t| t.text "uuid", null: false t.text "subj_id", null: false t.text "obj_id" @@ -18,15 +20,20 @@ def change t.datetime "occurred_at" t.string "message_action", limit: 191, default: "create", null: false t.string "relation_type_id", limit: 191 - t.text "subj", limit: 16777215 - t.text "obj", limit: 16777215 + t.text "subj", limit: 16_777_215 + t.text "obj", limit: 16_777_215 t.integer "total", default: 1 t.string "license", limit: 191 - t.index ["created_at", "indexed_at", "updated_at"], name: "index_events_on_created_indexed_updated" - t.index ["source_id", "created_at"], name: "index_events_on_source_id_created_at" - t.index ["subj_id", "obj_id", "source_id", "relation_type_id"], name: "index_events_on_multiple_columns", unique: true, length: { subj_id: 191, obj_id: 191 } - t.index ["updated_at"], name: "index_events_on_updated_at" - t.index ["uuid"], name: "index_events_on_uuid", unique: true, length: 36 + t.index %w[created_at indexed_at updated_at], + name: "index_events_on_created_indexed_updated" + t.index %w[source_id created_at], + name: 
"index_events_on_source_id_created_at" + t.index %w[subj_id obj_id source_id relation_type_id], + name: "index_events_on_multiple_columns", + unique: true, + length: { subj_id: 191, obj_id: 191 } + t.index %w[updated_at], name: "index_events_on_updated_at" + t.index %w[uuid], name: "index_events_on_uuid", unique: true, length: 36 end end end diff --git a/db/migrate/20190727170040_add_client_fields.rb b/db/migrate/20190727170040_add_client_fields.rb index 5ef98ccb3..792fce74e 100644 --- a/db/migrate/20190727170040_add_client_fields.rb +++ b/db/migrate/20190727170040_add_client_fields.rb @@ -2,9 +2,9 @@ class AddClientFields < ActiveRecord::Migration[5.2] def change - remove_index :datacentre, [:re3data] + remove_index :datacentre, %i[re3data] rename_column :datacentre, :re3data, :re3data_id - add_index :datacentre, [:re3data_id] + add_index :datacentre, %i[re3data_id] add_column :datacentre, :issn, :json add_column :datacentre, :certificate, :json diff --git a/db/migrate/20200122153731_add_globus_uuid.rb b/db/migrate/20200122153731_add_globus_uuid.rb index 87bc581d1..64f3e4180 100644 --- a/db/migrate/20200122153731_add_globus_uuid.rb +++ b/db/migrate/20200122153731_add_globus_uuid.rb @@ -5,7 +5,13 @@ def change add_column :datacentre, :globus_uuid, :string, limit: 191 add_column :allocator, :globus_uuid, :string, limit: 191 - add_index :datacentre, [:globus_uuid], name: "index_datacentre_on_globus_uuid", length: { globus_uuid: 191 } - add_index :allocator, [:globus_uuid], name: "index_allocator_on_globus_uuid", length: { globus_uuid: 191 } + add_index :datacentre, + %i[globus_uuid], + name: "index_datacentre_on_globus_uuid", + length: { globus_uuid: 191 } + add_index :allocator, + %i[globus_uuid], + name: "index_allocator_on_globus_uuid", + length: { globus_uuid: 191 } end end diff --git a/db/migrate/20200131180609_add_events_properties.rb b/db/migrate/20200131180609_add_events_properties.rb index 772a3816d..996ffdca2 100644 --- 
a/db/migrate/20200131180609_add_events_properties.rb +++ b/db/migrate/20200131180609_add_events_properties.rb @@ -6,8 +6,12 @@ def up add_column :events, :target_doi, :text add_column :events, :source_relation_type_id, :string, limit: 191 add_column :events, :target_relation_type_id, :string, limit: 191 - add_index :events, %i[source_doi source_relation_type_id], name: "index_events_on_source_doi", length: { source_doi: 100 } - add_index :events, %i[target_doi target_relation_type_id], name: "index_events_on_target_doi", length: { target_doi: 100 } + add_index :events, + %i[source_doi source_relation_type_id], + name: "index_events_on_source_doi", length: { source_doi: 100 } + add_index :events, + %i[target_doi target_relation_type_id], + name: "index_events_on_target_doi", length: { target_doi: 100 } end def down diff --git a/db/migrate/20200302191027_add_attachment_logo_to_providers.rb b/db/migrate/20200302191027_add_attachment_logo_to_providers.rb index 72503e464..7c0e47d0e 100644 --- a/db/migrate/20200302191027_add_attachment_logo_to_providers.rb +++ b/db/migrate/20200302191027_add_attachment_logo_to_providers.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class AddAttachmentLogoToProviders < ActiveRecord::Migration[5.2] def self.up change_table :allocator do |t| diff --git a/db/migrate/20200313163242_rename_prefix_tables.rb b/db/migrate/20200313163242_rename_prefix_tables.rb index 8c5ce3fe6..2aac0847b 100644 --- a/db/migrate/20200313163242_rename_prefix_tables.rb +++ b/db/migrate/20200313163242_rename_prefix_tables.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + class RenamePrefixTables < ActiveRecord::Migration[5.2] def change rename_column :prefix, :created, :created_at @@ -10,13 +12,13 @@ def change rename_column :allocator_prefixes, :prefixes, :prefix_id add_column :allocator_prefixes, :uid, :string, null: false rename_table :allocator_prefixes, :provider_prefixes - add_index :provider_prefixes, [:uid], length: 128 + add_index :provider_prefixes, 
%i[uid], length: 128 rename_column :datacentre_prefixes, :datacentre, :client_id rename_column :datacentre_prefixes, :prefixes, :prefix_id rename_column :datacentre_prefixes, :allocator_prefixes, :provider_prefix_id add_column :datacentre_prefixes, :uid, :string, null: false rename_table :datacentre_prefixes, :client_prefixes - add_index :client_prefixes, [:uid], length: 128 + add_index :client_prefixes, %i[uid], length: 128 end end diff --git a/db/migrate/20200826173254_add_agency_index.rb b/db/migrate/20200826173254_add_agency_index.rb index 000eb681f..80c6b6b99 100644 --- a/db/migrate/20200826173254_add_agency_index.rb +++ b/db/migrate/20200826173254_add_agency_index.rb @@ -1,6 +1,10 @@ +# frozen_string_literal: true + class AddAgencyIndex < ActiveRecord::Migration[5.2] def up - add_index :dataset, [:type], name: "index_dataset_on_type", length: { type: 16 } + add_index :dataset, + %i[type], + name: "index_dataset_on_type", length: { type: 16 } end def down diff --git a/db/migrate/20201019125327_change_domains_column.rb b/db/migrate/20201019125327_change_domains_column.rb index 67a3b81eb..a2ac66f72 100644 --- a/db/migrate/20201019125327_change_domains_column.rb +++ b/db/migrate/20201019125327_change_domains_column.rb @@ -1,6 +1,8 @@ +# frozen_string_literal: true + class ChangeDomainsColumn < ActiveRecord::Migration[5.2] def up - change_column :datacentre, :domains, :text, limit: 65535 + change_column :datacentre, :domains, :text, limit: 65_535 end def down diff --git a/db/schema.rb b/db/schema.rb index cf70e1b99..2012f05c9 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # This file is auto-generated from the current state of the database. Instead # of editing this file, please use the migrations feature of Active Record to # incrementally modify your database, and then regenerate this schema definition. @@ -11,18 +13,22 @@ # It's strongly recommended that you check this file into your version control system. 
ActiveRecord::Schema.define(version: 2020_10_19_125327) do - - create_table "active_storage_attachments", options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", force: :cascade do |t| + create_table "active_storage_attachments", + options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", + force: :cascade do |t| t.string "name", limit: 191, null: false t.string "record_type", null: false t.bigint "record_id", null: false t.bigint "blob_id", null: false t.datetime "created_at", null: false - t.index ["blob_id"], name: "index_active_storage_attachments_on_blob_id" - t.index ["record_type", "record_id", "name", "blob_id"], name: "index_active_storage_attachments_uniqueness", unique: true + t.index %w[blob_id], name: "index_active_storage_attachments_on_blob_id" + t.index %w[record_type record_id name blob_id], + name: "index_active_storage_attachments_uniqueness", unique: true end - create_table "active_storage_blobs", options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", force: :cascade do |t| + create_table "active_storage_blobs", + options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", + force: :cascade do |t| t.string "key", limit: 191, null: false t.string "filename", limit: 191, null: false t.string "content_type", limit: 191 @@ -30,10 +36,12 @@ t.bigint "byte_size", null: false t.string "checksum", limit: 191, null: false t.datetime "created_at", null: false - t.index ["key"], name: "index_active_storage_blobs_on_key", unique: true + t.index %w[key], name: "index_active_storage_blobs_on_key", unique: true end - create_table "allocator", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "allocator", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.string "system_email", null: false t.datetime "created" t.integer "doi_quota_allowed", null: false @@ -45,7 +53,7 @@ t.string "symbol", null: false t.datetime "updated" t.integer "version" - t.text "comments", limit: 4294967295 + t.text "comments", limit: 4_294_967_295 t.string 
"experiments" t.text "description" t.string "region" @@ -77,13 +85,15 @@ t.bigint "logo_file_size" t.datetime "logo_updated_at" t.string "uid", limit: 32 - t.index ["globus_uuid"], name: "index_allocator_on_globus_uuid" - t.index ["organization_type"], name: "index_allocator_organization_type" - t.index ["symbol"], name: "symbol", unique: true - t.index ["uid"], name: "index_allocator_on_uid" + t.index %w[globus_uuid], name: "index_allocator_on_globus_uuid" + t.index %w[organization_type], name: "index_allocator_organization_type" + t.index %w[symbol], name: "symbol", unique: true + t.index %w[uid], name: "index_allocator_on_uid" end - create_table "audits", options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", force: :cascade do |t| + create_table "audits", + options: "ENGINE=InnoDB DEFAULT CHARSET=latin1", + force: :cascade do |t| t.integer "auditable_id" t.string "auditable_type" t.integer "associated_id" @@ -98,29 +108,36 @@ t.string "remote_address" t.string "request_uuid" t.datetime "created_at", precision: 3 - t.index ["associated_type", "associated_id"], name: "associated_index" - t.index ["auditable_type", "auditable_id", "version"], name: "auditable_index" - t.index ["created_at"], name: "index_audits_on_created_at" - t.index ["request_uuid"], name: "index_audits_on_request_uuid" - t.index ["user_id", "user_type"], name: "user_index" + t.index %w[associated_type associated_id], name: "associated_index" + t.index %w[auditable_type auditable_id version], name: "auditable_index" + t.index %w[created_at], name: "index_audits_on_created_at" + t.index %w[request_uuid], name: "index_audits_on_request_uuid" + t.index %w[user_id user_type], name: "user_index" end - create_table "client_prefixes", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "client_prefixes", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.bigint "client_id", null: false t.bigint "prefix_id", null: false t.datetime "created_at" 
t.datetime "updated_at" t.bigint "provider_prefix_id" t.string "uid" - t.index ["client_id", "prefix_id"], name: "index_client_prefixes_on_client_id_and_prefix_id", unique: true - t.index ["client_id"], name: "FK13A1B3BA47B5F5FF" - t.index ["prefix_id"], name: "FK13A1B3BAAF86A1C7" - t.index ["provider_prefix_id"], name: "index_client_prefixes_on_provider_prefix_id" - t.index ["uid"], name: "index_client_prefixes_on_uid", length: 128 + t.index %w[client_id prefix_id], + name: "index_client_prefixes_on_client_id_and_prefix_id", + unique: true + t.index %w[client_id], name: "FK13A1B3BA47B5F5FF" + t.index %w[prefix_id], name: "FK13A1B3BAAF86A1C7" + t.index %w[provider_prefix_id], + name: "index_client_prefixes_on_provider_prefix_id" + t.index %w[uid], name: "index_client_prefixes_on_uid", length: 128 end - create_table "datacentre", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| - t.text "comments", limit: 4294967295 + create_table "datacentre", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| + t.text "comments", limit: 4_294_967_295 t.string "system_email", null: false t.datetime "created" t.integer "doi_quota_allowed", null: false @@ -151,15 +168,17 @@ t.json "service_contact" t.string "globus_uuid", limit: 191 t.string "uid", limit: 32 - t.index ["allocator"], name: "FK6695D60546EBD781" - t.index ["globus_uuid"], name: "index_datacentre_on_globus_uuid" - t.index ["re3data_id"], name: "index_datacentre_on_re3data_id" - t.index ["symbol"], name: "symbol", unique: true - t.index ["uid"], name: "index_datacentre_on_uid" - t.index ["url"], name: "index_datacentre_on_url", length: 100 + t.index %w[allocator], name: "FK6695D60546EBD781" + t.index %w[globus_uuid], name: "index_datacentre_on_globus_uuid" + t.index %w[re3data_id], name: "index_datacentre_on_re3data_id" + t.index %w[symbol], name: "symbol", unique: true + t.index %w[uid], name: "index_datacentre_on_uid" + t.index %w[url], name: "index_datacentre_on_url", length: 
100 end - create_table "dataset", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "dataset", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.datetime "created" t.string "doi", null: false t.binary "is_active", limit: 1, null: false @@ -178,7 +197,8 @@ t.string "aasm_state" t.string "reason" t.string "source", limit: 191 - t.datetime "indexed", precision: 3, default: "1970-01-01 00:00:00", null: false + t.datetime "indexed", + precision: 3, default: "1970-01-01 00:00:00", null: false t.json "creators" t.json "contributors" t.json "titles" @@ -200,23 +220,29 @@ t.json "subjects" t.string "schema_version", limit: 191 t.json "content_url" - t.binary "xml", limit: 16777215 + t.binary "xml", limit: 16_777_215 t.json "landing_page" t.string "agency", limit: 191, default: "datacite" t.string "type", limit: 16, default: "DataCiteDoi" - t.index ["aasm_state"], name: "index_dataset_on_aasm_state" - t.index ["created", "indexed", "updated"], name: "index_dataset_on_created_indexed_updated" - t.index ["datacentre"], name: "FK5605B47847B5F5FF" - t.index ["doi"], name: "doi", unique: true - t.index ["last_landing_page_content_type"], name: "index_dataset_on_last_landing_page_content_type" - t.index ["last_landing_page_status"], name: "index_dataset_on_last_landing_page_status" - t.index ["schema_version"], name: "index_dataset_on_schema_version" - t.index ["source"], name: "index_dataset_source" - t.index ["type"], name: "index_dataset_on_type" - t.index ["url"], name: "index_dataset_on_url", length: 100 + t.index %w[aasm_state], name: "index_dataset_on_aasm_state" + t.index %w[created indexed updated], + name: "index_dataset_on_created_indexed_updated" + t.index %w[datacentre], name: "FK5605B47847B5F5FF" + t.index %w[doi], name: "doi", unique: true + t.index %w[last_landing_page_content_type], + name: "index_dataset_on_last_landing_page_content_type" + t.index %w[last_landing_page_status], + name: 
"index_dataset_on_last_landing_page_status" + t.index %w[schema_version], name: "index_dataset_on_schema_version" + t.index %w[source], name: "index_dataset_source" + t.index %w[type], name: "index_dataset_on_type" + t.index %w[url], name: "index_dataset_on_url", length: 100 end - create_table "events", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin", force: :cascade do |t| + create_table "events", + options: + "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin", + force: :cascade do |t| t.text "uuid", null: false t.text "subj_id", null: false t.text "obj_id" @@ -232,72 +258,99 @@ t.datetime "occurred_at" t.string "message_action", limit: 191, default: "create", null: false t.string "relation_type_id", limit: 191 - t.text "subj", limit: 16777215 - t.text "obj", limit: 16777215 + t.text "subj", limit: 16_777_215 + t.text "obj", limit: 16_777_215 t.integer "total", default: 1 t.string "license", limit: 191 t.text "source_doi" t.text "target_doi" t.string "source_relation_type_id", limit: 191 t.string "target_relation_type_id", limit: 191 - t.index ["created_at", "indexed_at", "updated_at"], name: "index_events_on_created_indexed_updated" - t.index ["source_doi", "source_relation_type_id"], name: "index_events_on_source_doi", length: { source_doi: 100 } - t.index ["source_id", "created_at"], name: "index_events_on_source_id_created_at" - t.index ["subj_id", "obj_id", "source_id", "relation_type_id"], name: "index_events_on_multiple_columns", unique: true, length: { subj_id: 191, obj_id: 191 } - t.index ["target_doi", "target_relation_type_id"], name: "index_events_on_target_doi", length: { target_doi: 100 } - t.index ["updated_at"], name: "index_events_on_updated_at" - t.index ["uuid"], name: "index_events_on_uuid", unique: true, length: 36 + t.index %w[created_at indexed_at updated_at], + name: "index_events_on_created_indexed_updated" + t.index %w[source_doi source_relation_type_id], + name: "index_events_on_source_doi", length: { 
source_doi: 100 } + t.index %w[source_id created_at], + name: "index_events_on_source_id_created_at" + t.index %w[subj_id obj_id source_id relation_type_id], + name: "index_events_on_multiple_columns", + unique: true, + length: { subj_id: 191, obj_id: 191 } + t.index %w[target_doi target_relation_type_id], + name: "index_events_on_target_doi", length: { target_doi: 100 } + t.index %w[updated_at], name: "index_events_on_updated_at" + t.index %w[uuid], name: "index_events_on_uuid", unique: true, length: 36 end - create_table "media", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "media", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.datetime "created" t.string "media_type", limit: 80 t.datetime "updated" t.text "url", null: false t.integer "version" t.bigint "dataset", null: false - t.index ["dataset", "updated"], name: "dataset_updated" - t.index ["dataset"], name: "FK62F6FE44D3D6B1B" - t.index ["url"], name: "index_media_on_url", length: 100 + t.index %w[dataset updated], name: "dataset_updated" + t.index %w[dataset], name: "FK62F6FE44D3D6B1B" + t.index %w[url], name: "index_media_on_url", length: 100 end - create_table "metadata", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "metadata", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.datetime "created" t.integer "metadata_version" t.integer "version" - t.binary "xml", limit: 16777215 + t.binary "xml", limit: 16_777_215 t.bigint "dataset", null: false t.binary "is_converted_by_mds", limit: 1 t.string "namespace" - t.index ["dataset", "metadata_version"], name: "dataset_version" - t.index ["dataset"], name: "FKE52D7B2F4D3D6B1B" + t.index %w[dataset metadata_version], name: "dataset_version" + t.index %w[dataset], name: "FKE52D7B2F4D3D6B1B" end - create_table "prefixes", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "prefixes", + options: 
"ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.datetime "created_at" t.string "uid", limit: 80, null: false t.string "ra", default: "DataCite" - t.index ["uid"], name: "prefix", unique: true + t.index %w[uid], name: "prefix", unique: true end - create_table "provider_prefixes", options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", force: :cascade do |t| + create_table "provider_prefixes", + options: "ENGINE=InnoDB DEFAULT CHARSET=utf8", + force: :cascade do |t| t.bigint "provider_id", null: false t.bigint "prefix_id", null: false t.datetime "created_at" t.datetime "updated_at" t.string "uuid" t.string "uid" - t.index ["prefix_id"], name: "FKE7FBD674AF86A1C7" - t.index ["provider_id", "prefix_id"], name: "index_provider_prefixes_on_provider_id_and_prefix_id", unique: true - t.index ["provider_id"], name: "FKE7FBD67446EBD781" - t.index ["uid"], name: "index_provider_prefixes_on_uid", length: 128 + t.index %w[prefix_id], name: "FKE7FBD674AF86A1C7" + t.index %w[provider_id prefix_id], + name: "index_provider_prefixes_on_provider_id_and_prefix_id", + unique: true + t.index %w[provider_id], name: "FKE7FBD67446EBD781" + t.index %w[uid], name: "index_provider_prefixes_on_uid", length: 128 end - add_foreign_key "client_prefixes", "datacentre", column: "client_id", name: "_FK13A1B3BA47B5F5FF" + add_foreign_key "client_prefixes", + "datacentre", + column: "client_id", name: "_FK13A1B3BA47B5F5FF" add_foreign_key "client_prefixes", "prefixes", name: "FK13A1B3BAAF86A1C7" - add_foreign_key "datacentre", "allocator", column: "allocator", name: "_FK6695D60546EBD781" - add_foreign_key "media", "dataset", column: "dataset", name: "FK62F6FE44D3D6B1B" - add_foreign_key "metadata", "dataset", column: "dataset", name: "FKE52D7B2F4D3D6B1B" - add_foreign_key "provider_prefixes", "allocator", column: "provider_id", name: "FKE7FBD67446EBD781" + add_foreign_key "datacentre", + "allocator", + column: "allocator", name: "_FK6695D60546EBD781" + add_foreign_key "media", + "dataset", + 
column: "dataset", name: "FK62F6FE44D3D6B1B" + add_foreign_key "metadata", + "dataset", + column: "dataset", name: "FKE52D7B2F4D3D6B1B" + add_foreign_key "provider_prefixes", + "allocator", + column: "provider_id", name: "FKE7FBD67446EBD781" add_foreign_key "provider_prefixes", "prefixes", name: "FKE7FBD674AF86A1C7" end diff --git a/db/seeds/development/base.seeds.rb b/db/seeds/development/base.seeds.rb index f66783e02..fa545f939 100644 --- a/db/seeds/development/base.seeds.rb +++ b/db/seeds/development/base.seeds.rb @@ -1,15 +1,35 @@ +# frozen_string_literal: true + require "factory_bot_rails" -fail "Seed tasks can only be used in the development enviroment" if Rails.env.production? -fail "You need to set up a MDS_USERNAME and MDS_PASSWORD" if ENV["MDS_USERNAME"].blank? || ENV["MDS_PASSWORD"].blank? +if Rails.env.production? + fail "Seed tasks can only be used in the development enviroment" +end +if ENV["MDS_USERNAME"].blank? || ENV["MDS_PASSWORD"].blank? + fail "You need to set up a MDS_USERNAME and MDS_PASSWORD" +end -FactoryBot.create(:provider, symbol: "ADMIN") if Provider.where(symbol: "ADMIN").blank? -provider = Provider.where(symbol: "DATACITE").first || FactoryBot.create(:provider, symbol: "DATACITE") -client = Client.where(symbol: "DATACITE.TEST").first || FactoryBot.create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) +if Provider.where(symbol: "ADMIN").blank? + FactoryBot.create(:provider, symbol: "ADMIN") +end +provider = + Provider.where(symbol: "DATACITE").first || + FactoryBot.create(:provider, symbol: "DATACITE") +client = + Client.where(symbol: "DATACITE.TEST").first || + FactoryBot.create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) if Prefix.where(uid: "10.14454").blank? 
prefix = FactoryBot.create(:prefix, uid: "10.14454") ### This creates both the client_prefix and the pprovider association - FactoryBot.create(:client_prefix, client_id: client.symbol, prefix_id: prefix.uid) + FactoryBot.create( + :client_prefix, + client_id: client.symbol, prefix_id: prefix.uid, + ) end dois = FactoryBot.create_list(:doi, 10, client: client, state: "findable") FactoryBot.create_list(:event_for_datacite_related, 3, obj_id: dois.first.doi) diff --git a/db/seeds/development/consortium_transfer.seeds.rb b/db/seeds/development/consortium_transfer.seeds.rb index 1d651aa14..828b707fd 100644 --- a/db/seeds/development/consortium_transfer.seeds.rb +++ b/db/seeds/development/consortium_transfer.seeds.rb @@ -1,15 +1,37 @@ +# frozen_string_literal: true + require "factory_bot_rails" -fail "Seed tasks can only be used in the development enviroment" if Rails.env.production? +if Rails.env.production? + fail "Seed tasks can only be used in the development enviroment" +end after "development:base" do - provider = Provider.where(symbol: "QUECHUA").first || FactoryBot.create(:provider, symbol: "QUECHUA") - client = Client.where(symbol: "QUECHUA.TEXT").first || FactoryBot.create(:client, provider: provider, symbol: "QUECHUA.TEXT", password: ENV["MDS_PASSWORD"]) + provider = + Provider.where(symbol: "QUECHUA").first || + FactoryBot.create(:provider, symbol: "QUECHUA") + client = + Client.where(symbol: "QUECHUA.TEXT").first || + FactoryBot.create( + :client, + provider: provider, + symbol: "QUECHUA.TEXT", + password: ENV["MDS_PASSWORD"], + ) if Prefix.where(uid: "10.14459").blank? 
prefix = FactoryBot.create(:prefix, uid: "10.14459") ## one needs to create the provider first so the assignation is made - provider_prefix_id = FactoryBot.create(:provider_prefix, provider_id: provider.symbol, prefix_id: prefix.uid) - FactoryBot.create(:client_prefix, client_id: client.symbol, prefix_id: prefix.uid, provider_prefix_id: provider_prefix_id.uid) + provider_prefix_id = + FactoryBot.create( + :provider_prefix, + provider_id: provider.symbol, prefix_id: prefix.uid, + ) + FactoryBot.create( + :client_prefix, + client_id: client.symbol, + prefix_id: prefix.uid, + provider_prefix_id: provider_prefix_id.uid, + ) end dois = FactoryBot.create_list(:doi, 10, client: client, state: "findable") FactoryBot.create_list(:event_for_datacite_related, 3, obj_id: dois.first.doi) diff --git a/db/seeds/development/researcher_profile.seeds.rb b/db/seeds/development/researcher_profile.seeds.rb index 05819ea0e..f1ce105db 100644 --- a/db/seeds/development/researcher_profile.seeds.rb +++ b/db/seeds/development/researcher_profile.seeds.rb @@ -1,13 +1,28 @@ +# frozen_string_literal: true + require "factory_bot_rails" -fail "Seed tasks can only be used in the development enviroment" if Rails.env.production? +if Rails.env.production? 
+ fail "Seed tasks can only be used in the development enviroment" +end after "development:base" do client = Client.all.first dois = FactoryBot.create_list(:doi, 70, client: client, state: "findable") - FactoryBot.create_list(:event_for_datacite_related, 34, obj_id: dois.first.doi) + FactoryBot.create_list( + :event_for_datacite_related, + 34, + obj_id: dois.first.doi, + ) FactoryBot.create_list(:event_for_datacite_usage, 32, obj_id: dois.first.doi) - FactoryBot.create(:event_for_datacite_orcid_auto_update, subj_id: dois.first.doi, obj_id: "http://orcid.org/0000-0003-2926-8353") - FactoryBot.create_list(:event_for_datacite_orcid_auto_update, 5, obj_id: "http://orcid.org/0000-0003-2926-8353") + FactoryBot.create( + :event_for_datacite_orcid_auto_update, + subj_id: dois.first.doi, obj_id: "http://orcid.org/0000-0003-2926-8353", + ) + FactoryBot.create_list( + :event_for_datacite_orcid_auto_update, + 5, + obj_id: "http://orcid.org/0000-0003-2926-8353", + ) end diff --git a/spec/concerns/authenticable_spec.rb b/spec/concerns/authenticable_spec.rb index c9e0e5993..67289aca7 100644 --- a/spec/concerns/authenticable_spec.rb +++ b/spec/concerns/authenticable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe User, type: :model do @@ -85,128 +87,199 @@ it "staff_admin" do token = User.generate_token(role_id: "staff_admin") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "staff_user" do token = User.generate_token(role_id: "staff_user") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "consortium_admin" do - token = User.generate_token(role_id: "consortium_admin", provider_id: "datacite") + token = + User.generate_token( + 
role_id: "consortium_admin", provider_id: "datacite", + ) subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "provider_admin" do - token = User.generate_token(role_id: "provider_admin", provider_id: "datacite") + token = + User.generate_token( + role_id: "provider_admin", provider_id: "datacite", + ) subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "provider_user" do - token = User.generate_token(role_id: "provider_user", provider_id: "datacite") + token = + User.generate_token(role_id: "provider_user", provider_id: "datacite") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "client_admin" do - token = User.generate_token(role_id: "client_admin", client_id: "datacite.rph") + token = + User.generate_token( + role_id: "client_admin", client_id: "datacite.rph", + ) subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "client_user" do - token = User.generate_token(role_id: "client_user", client_id: "datacite.rph") + token = + User.generate_token(role_id: "client_user", client_id: "datacite.rph") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "user" do token = User.generate_token(role_id: "user") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false 
+ expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "temporary" do token = User.generate_token(role_id: "temporary") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "anonymous" do token = User.generate_token(role_id: "anonymous") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end end context "draft doi" do - let(:consortium) { create(:provider, symbol: "DC", role_name: "ROLE_CONSORTIUM") } - let(:provider) { create(:provider, symbol: "DATACITE", consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION") } - let(:client) { create(:client, provider: provider, symbol: "DATACITE.RPH") } + let(:consortium) do + create(:provider, symbol: "DC", role_name: "ROLE_CONSORTIUM") + end + let(:provider) do + create( + :provider, + symbol: "DATACITE", + consortium: consortium, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + ) + end + let(:client) do + create(:client, provider: provider, symbol: "DATACITE.RPH") + end let(:doi) { create(:doi, client: client) } it "staff_admin" do token = User.generate_token(role_id: "staff_admin") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "staff_user" do token = User.generate_token(role_id: "staff_user") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "consortium_admin" do - token = User.generate_token(role_id: "consortium_admin", provider_id: "dc") + token = + 
User.generate_token(role_id: "consortium_admin", provider_id: "dc") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "provider_admin" do - token = User.generate_token(role_id: "provider_admin", provider_id: "datacite") + token = + User.generate_token( + role_id: "provider_admin", provider_id: "datacite", + ) subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "provider_user" do - token = User.generate_token(role_id: "provider_user", provider_id: "datacite") + token = + User.generate_token(role_id: "provider_user", provider_id: "datacite") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "client_admin" do - token = User.generate_token(role_id: "client_admin", client_id: "datacite.rph") + token = + User.generate_token( + role_id: "client_admin", client_id: "datacite.rph", + ) subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "client_user" do - token = User.generate_token(role_id: "client_user", client_id: "datacite.rph") + token = + User.generate_token(role_id: "client_user", client_id: "datacite.rph") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be false + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be false end it "user" do token = User.generate_token(role_id: "user") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: 
subject)).to be true + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be true end it "temporary" do token = User.generate_token(role_id: "temporary") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be true + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be true end it "anonymous" do token = User.generate_token(role_id: "anonymous") subject = User.new(token) - expect(subject.not_allowed_by_doi_and_user(doi: doi, user: subject)).to be true + expect( + subject.not_allowed_by_doi_and_user(doi: doi, user: subject), + ).to be true end end end @@ -253,7 +326,8 @@ describe "encode_auth_param" do it "works" do - credentials = subject.encode_auth_param(username: subject.symbol, password: 12345) + credentials = + subject.encode_auth_param(username: subject.symbol, password: 12_345) expect(credentials).to start_with("VEVT") end @@ -266,17 +340,45 @@ describe "decode_auth_param" do it "provider" do - expect(subject.decode_auth_param(username: subject.symbol, password: "12345")).to eq("uid" => subject.symbol.downcase, "name" => subject.name, "email" => subject.system_email, "role_id" => "provider_admin", "provider_id" => subject.symbol.downcase) + expect( + subject.decode_auth_param(username: subject.symbol, password: "12345"), + ).to eq( + "uid" => subject.symbol.downcase, + "name" => subject.name, + "email" => subject.system_email, + "role_id" => "provider_admin", + "provider_id" => subject.symbol.downcase, + ) end it "admin" do - subject = create(:provider, symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345") - expect(subject.decode_auth_param(username: subject.symbol, password: "12345")).to eq("uid" => subject.symbol.downcase, "name" => subject.name, "email" => subject.system_email, "role_id" => "staff_admin") + subject = + create( + :provider, + symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345", + ) + expect( + 
subject.decode_auth_param(username: subject.symbol, password: "12345"), + ).to eq( + "uid" => subject.symbol.downcase, + "name" => subject.name, + "email" => subject.system_email, + "role_id" => "staff_admin", + ) end it "consortium" do - subject = create(:provider, role_name: "ROLE_CONSORTIUM", password_input: "12345") - expect(subject.decode_auth_param(username: subject.symbol, password: "12345")).to eq("uid" => subject.symbol.downcase, "name" => subject.name, "email" => subject.system_email, "role_id" => "consortium_admin", "provider_id" => subject.symbol.downcase) + subject = + create(:provider, role_name: "ROLE_CONSORTIUM", password_input: "12345") + expect( + subject.decode_auth_param(username: subject.symbol, password: "12345"), + ).to eq( + "uid" => subject.symbol.downcase, + "name" => subject.name, + "email" => subject.system_email, + "role_id" => "consortium_admin", + "provider_id" => subject.symbol.downcase, + ) end end end @@ -286,13 +388,35 @@ describe "decode_auth_param" do it "works" do - expect(subject.decode_auth_param(username: subject.symbol, password: 12345)).to eq("uid" => subject.symbol.downcase, "name" => subject.name, "email" => subject.system_email, "password" => "12345", "role_id" => "client_admin", "provider_id" => subject.provider_id, "client_id" => subject.symbol.downcase) + expect( + subject.decode_auth_param(username: subject.symbol, password: 12_345), + ).to eq( + "uid" => subject.symbol.downcase, + "name" => subject.name, + "email" => subject.system_email, + "password" => "12345", + "role_id" => "client_admin", + "provider_id" => subject.provider_id, + "client_id" => subject.symbol.downcase, + ) end end describe "get_payload" do it "works" do - expect(subject.get_payload(uid: subject.symbol.downcase, user: subject, password: 12345)).to eq("uid" => subject.symbol.downcase, "name" => subject.name, "email" => subject.system_email, "password" => 12345, "role_id" => "client_admin", "provider_id" => subject.provider_id, "client_id" => 
subject.symbol.downcase) + expect( + subject.get_payload( + uid: subject.symbol.downcase, user: subject, password: 12_345, + ), + ).to eq( + "uid" => subject.symbol.downcase, + "name" => subject.name, + "email" => subject.system_email, + "password" => 12_345, + "role_id" => "client_admin", + "provider_id" => subject.provider_id, + "client_id" => subject.symbol.downcase, + ) end end end diff --git a/spec/concerns/countable_spec.rb b/spec/concerns/countable_spec.rb index 2264178cf..d2f48ea05 100644 --- a/spec/concerns/countable_spec.rb +++ b/spec/concerns/countable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe "Providers", type: :controller, elasticsearch: true do @@ -5,19 +7,23 @@ describe "provider_count" do before do - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) @providers = create_list(:provider, 3) end it "counts all providers" do Provider.import sleep 2 - expect(subject.provider_count).to eq([{ "count" => 3, "id" => "2015", "title" => "2015" }, - { "count" => 3, "id" => "2016", "title" => "2016" }, - { "count" => 3, "id" => "2017", "title" => "2017" }, - { "count" => 3, "id" => "2018", "title" => "2018" }, - { "count" => 3, "id" => "2019", "title" => "2019" }, - { "count" => 3, "id" => "2020", "title" => "2020" }]) + expect(subject.provider_count).to eq( + [ + { "count" => 3, "id" => "2015", "title" => "2015" }, + { "count" => 3, "id" => "2016", "title" => "2016" }, + { "count" => 3, "id" => "2017", "title" => "2017" }, + { "count" => 3, "id" => "2018", "title" => "2018" }, + { "count" => 3, "id" => "2019", "title" => "2019" }, + { "count" => 3, "id" => "2020", "title" => "2020" }, + ], + ) end it "takes into account deleted providers" do @@ -25,30 +31,38 @@ @providers.last.update(deleted_at: "2015-06-14") Provider.import sleep 2 - expect(subject.provider_count).to eq([{ "count" => 1, "id" => "2018", "title" => "2018" }, 
- { "count" => 1, "id" => "2019", "title" => "2019" }, - { "count" => 1, "id" => "2020", "title" => "2020" }, - { "count" => 2, "id" => "2015", "title" => "2015" }, - { "count" => 2, "id" => "2016", "title" => "2016" }, - { "count" => 2, "id" => "2017", "title" => "2017" }]) + expect(subject.provider_count).to eq( + [ + { "count" => 1, "id" => "2018", "title" => "2018" }, + { "count" => 1, "id" => "2019", "title" => "2019" }, + { "count" => 1, "id" => "2020", "title" => "2020" }, + { "count" => 2, "id" => "2015", "title" => "2015" }, + { "count" => 2, "id" => "2016", "title" => "2016" }, + { "count" => 2, "id" => "2017", "title" => "2017" }, + ], + ) end end describe "client_count" do before do - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) @clients = create_list(:client, 3) end it "counts all clients" do Client.import sleep 2 - expect(subject.client_count).to eq([{ "count" => 3, "id" => "2015", "title" => "2015" }, - { "count" => 3, "id" => "2016", "title" => "2016" }, - { "count" => 3, "id" => "2017", "title" => "2017" }, - { "count" => 3, "id" => "2018", "title" => "2018" }, - { "count" => 3, "id" => "2019", "title" => "2019" }, - { "count" => 3, "id" => "2020", "title" => "2020" }]) + expect(subject.client_count).to eq( + [ + { "count" => 3, "id" => "2015", "title" => "2015" }, + { "count" => 3, "id" => "2016", "title" => "2016" }, + { "count" => 3, "id" => "2017", "title" => "2017" }, + { "count" => 3, "id" => "2018", "title" => "2018" }, + { "count" => 3, "id" => "2019", "title" => "2019" }, + { "count" => 3, "id" => "2020", "title" => "2020" }, + ], + ) end it "takes into account deleted clients" do @@ -56,38 +70,63 @@ @clients.last.update(deleted_at: "2015-06-14") Client.import sleep 2 - expect(subject.client_count).to eq([{ "count" => 1, "id" => "2018", "title" => "2018" }, - { "count" => 1, "id" => "2019", "title" => "2019" }, - { "count" => 1, "id" => 
"2020", "title" => "2020" }, - { "count" => 2, "id" => "2015", "title" => "2015" }, - { "count" => 2, "id" => "2016", "title" => "2016" }, - { "count" => 2, "id" => "2017", "title" => "2017" }]) + expect(subject.client_count).to eq( + [ + { "count" => 1, "id" => "2018", "title" => "2018" }, + { "count" => 1, "id" => "2019", "title" => "2019" }, + { "count" => 1, "id" => "2020", "title" => "2020" }, + { "count" => 2, "id" => "2015", "title" => "2015" }, + { "count" => 2, "id" => "2016", "title" => "2016" }, + { "count" => 2, "id" => "2017", "title" => "2017" }, + ], + ) end end describe "doi_count" do before do - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) end - let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM", symbol: "DC") } - let(:provider) { create(:provider, consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION", symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: "DATACITE.TEST") } - let!(:datacite_dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let(:consortium) do + create(:provider, role_name: "ROLE_CONSORTIUM", symbol: "DC") + end + let(:provider) do + create( + :provider, + consortium: consortium, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + symbol: "DATACITE", + ) + end + let(:client) do + create(:client, provider: provider, symbol: "DATACITE.TEST") + end + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end let!(:datacite_doi) { create(:doi, type: "DataciteDoi") } it "counts all dois" do DataciteDoi.import sleep 2 - expect(subject.doi_count).to eq([{ "count" => 4, "id" => "2015", "title" => "2015" }]) + expect(subject.doi_count).to eq( + [{ "count" => 4, "id" => "2015", "title" => "2015" }], + ) end it "counts all consortium dois" do DataciteDoi.import sleep 2 - 
expect(subject.doi_count(consortium_id: "dc")).to eq([{ "count" => 3, "id" => "2015", "title" => "2015" }]) + expect(subject.doi_count(consortium_id: "dc")).to eq( + [{ "count" => 3, "id" => "2015", "title" => "2015" }], + ) end it "counts all consortium dois no dois" do @@ -101,7 +140,9 @@ DataciteDoi.import sleep 2 - expect(subject.doi_count(provider_id: "datacite")).to eq([{ "count" => 3, "id" => "2015", "title" => "2015" }]) + expect(subject.doi_count(provider_id: "datacite")).to eq( + [{ "count" => 3, "id" => "2015", "title" => "2015" }], + ) end it "counts all provider dois no dois" do @@ -115,7 +156,9 @@ DataciteDoi.import sleep 2 - expect(subject.doi_count(client_id: "datacite.test")).to eq([{ "count" => 3, "id" => "2015", "title" => "2015" }]) + expect(subject.doi_count(client_id: "datacite.test")).to eq( + [{ "count" => 3, "id" => "2015", "title" => "2015" }], + ) end it "counts all client dois no dois" do diff --git a/spec/concerns/crosscitable_spec.rb b/spec/concerns/crosscitable_spec.rb index 1d03839e1..54874a3dd 100644 --- a/spec/concerns/crosscitable_spec.rb +++ b/spec/concerns/crosscitable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Doi, vcr: true do @@ -13,22 +15,29 @@ it "clean_xml malformed" do string = file_fixture("datacite_malformed.xml").read - expect { subject.clean_xml(string) }.to raise_error(Nokogiri::XML::SyntaxError, "39:18: FATAL: EndTag: '') + expect(subject.clean_xml(string)).to start_with( + "", + ) end it "clean_xml utf-16" do string = file_fixture("utf-16.xml").read - expect(subject.clean_xml(string)).to start_with('') + expect(subject.clean_xml(string)).to start_with( + "", + ) end end @@ -40,7 +49,10 @@ it "from_xml malformed" do string = file_fixture("datacite_malformed.xml").read - expect { subject.from_xml(string) }.to raise_error(Nokogiri::XML::SyntaxError, "40:1: FATAL: EndTag: ' "Fenner", "givenName" => "Martin", "name" => "Fenner, Martin", - "nameIdentifiers" => - [{ 
"nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", - "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }] }]) + expect(meta["creators"]).to eq( + [ + { + "familyName" => "Fenner", + "givenName" => "Martin", + "name" => "Fenner, Martin", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + }, + ], + ) expect(meta["titles"]).to eq([{ "title" => "Eating your own Dog Food" }]) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("DataCite") @@ -165,8 +210,17 @@ expect(meta["from"]).to eq("datacite") expect(meta["doi"]).to eq("10.5061/dryad.8515") expect(meta["creators"].length).to eq(8) - expect(meta["creators"].first).to eq("familyName" => "Ollomo", "givenName" => "Benjamin", "name" => "Ollomo, Benjamin", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => []) - expect(meta["titles"]).to eq([{ "title" => "Data from: A new malaria agent in African hominids." }]) + expect(meta["creators"].first).to eq( + "familyName" => "Ollomo", + "givenName" => "Benjamin", + "name" => "Ollomo, Benjamin", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + ) + expect(meta["titles"]).to eq( + [{ "title" => "Data from: A new malaria agent in African hominids." 
}], + ) expect(meta["publication_year"]).to eq("2011") expect(meta["publisher"]).to eq("Dryad Digital Repository") end @@ -178,11 +232,36 @@ expect(meta["string"]).to eq(string) expect(meta["from"]).to eq("datacite") expect(meta["doi"]).to eq("10.14454/testpub") - expect(meta["creators"]).to eq([{ "familyName" => "Smith", "givenName" => "John", "name" => "Smith, John", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => [] }, { "name" => "つまらないものですが", "nameIdentifiers" => - [{ "nameIdentifier" => "abc123", - "nameIdentifierScheme" => "ISNI" }], - "affiliation" => [] }]) - expect(meta["titles"]).to eq([{ "title" => "Właściwości rzutowań podprzestrzeniowych" }, { "title" => "Translation of Polish titles", "titleType" => "TranslatedTitle" }]) + expect(meta["creators"]).to eq( + [ + { + "familyName" => "Smith", + "givenName" => "John", + "name" => "Smith, John", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + }, + { + "name" => "つまらないものですが", + "nameIdentifiers" => [ + { + "nameIdentifier" => "abc123", "nameIdentifierScheme" => "ISNI" + }, + ], + "affiliation" => [], + }, + ], + ) + expect(meta["titles"]).to eq( + [ + { "title" => "Właściwości rzutowań podprzestrzeniowych" }, + { + "title" => "Translation of Polish titles", + "titleType" => "TranslatedTitle", + }, + ], + ) expect(meta["publication_year"]).to eq("2010") expect(meta["publisher"]).to eq("Springer") end @@ -222,11 +301,36 @@ expect(meta["from"]).to eq("crossref") expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "affiliation" => [{ "name" => "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland" }], "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary 
growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "affiliation" => [ + { + "name" => + "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland", + }, + ], + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("eLife Sciences Publications, Ltd") - expect(meta["container"]).to eq("firstPage" => "e01567", "identifier" => "2050-084X", "identifierType" => "ISSN", "title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "firstPage" => "e01567", + "identifier" => "2050-084X", + "identifierType" => "ISSN", + "title" => "eLife", + "type" => "Journal", + "volume" => "3", + ) end it "from crossref url" do @@ -236,11 +340,36 @@ expect(meta["from"]).to eq("crossref") expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "affiliation" => [{ "name" => "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland" }], "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "affiliation" => [ + { + "name" => + "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland", + }, + ], + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics 
during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("eLife Sciences Publications, Ltd") - expect(meta["container"]).to eq("firstPage" => "e01567", "identifier" => "2050-084X", "identifierType" => "ISSN", "title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "firstPage" => "e01567", + "identifier" => "2050-084X", + "identifierType" => "ISSN", + "title" => "eLife", + "type" => "Journal", + "volume" => "3", + ) expect(meta["agency"]).to eq("crossref") end @@ -250,11 +379,19 @@ expect(meta["from"]).to eq("datacite") expect(meta["doi"]).to eq("10.14454/1x4x-9056") expect(meta["creators"].length).to eq(1) - expect(meta["creators"].first).to eq("familyName" => "Fenner", - "givenName" => "Martin", - "name" => "Fenner, Martin", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal") + expect(meta["creators"].first).to eq( + "familyName" => "Fenner", + "givenName" => "Martin", + "name" => "Fenner, Martin", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + ) expect(meta["titles"]).to eq([{ "title" => "Cool DOI's" }]) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("DataCite") @@ -269,11 +406,29 @@ expect(meta["from"]).to eq("bibtex") expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + 
expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("{eLife} Sciences Organisation, Ltd.") - expect(meta["container"]).to eq("identifier" => "2050-084X", "identifierType" => "ISSN", "title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "identifier" => "2050-084X", + "identifierType" => "ISSN", + "title" => "eLife", + "type" => "Journal", + "volume" => "3", + ) end it "from ris" do @@ -284,11 +439,27 @@ expect(meta["from"]).to eq("ris") expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => []) - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("(:unav)") - expect(meta["container"]).to eq("title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "title" => "eLife", "type" => "Journal", "volume" => "3", + ) end it "from codemeta" do @@ 
-299,13 +470,23 @@ expect(meta["from"]).to eq("codemeta") expect(meta["doi"]).to eq("10.5063/f1m61h5x") expect(meta["creators"].length).to eq(3) - expect(meta["creators"].first).to eq("affiliation" => [{ "name" => "NCEAS" }], - "familyName" => "Jones", - "givenName" => "Matt", - "name" => "Jones, Matt", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-0077-4738", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "R Interface to the DataONE REST API" }]) + expect(meta["creators"].first).to eq( + "affiliation" => [{ "name" => "NCEAS" }], + "familyName" => "Jones", + "givenName" => "Matt", + "name" => "Jones, Matt", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-0077-4738", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [{ "title" => "R Interface to the DataONE REST API" }], + ) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("https://cran.r-project.org") end @@ -318,9 +499,19 @@ expect(meta["from"]).to eq("schema_org") expect(meta["doi"]).to eq("10.5438/4k3m-nyvg") expect(meta["creators"].length).to eq(1) - expect(meta["creators"].first).to eq("familyName" => "Fenner", "givenName" => "Martin", "name" => "Fenner, Martin", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal") + expect(meta["creators"].first).to eq( + "familyName" => "Fenner", + "givenName" => "Martin", + "name" => "Fenner, Martin", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + ) expect(meta["titles"]).to eq([{ "title" => 
"Eating your own Dog Food" }]) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("DataCite") @@ -334,8 +525,20 @@ expect(meta["from"]).to eq("schema_org") expect(meta["doi"]).to eq("10.1594/pangaea.836178") expect(meta["creators"].length).to eq(8) - expect(meta["creators"].first).to eq("familyName" => "Johansson", "givenName" => "Emma", "name" => "Johansson, Emma", "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Hydrological and meteorological investigations in a lake near Kangerlussuaq, west Greenland" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Johansson", + "givenName" => "Emma", + "name" => "Johansson, Emma", + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Hydrological and meteorological investigations in a lake near Kangerlussuaq, west Greenland", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("PANGAEA") expect(meta["schema_version"]).to eq(nil) @@ -361,8 +564,17 @@ expect(meta["doi"]).to eq("10.5061/dryad.8515") expect(meta["creators"].length).to eq(8) - expect(meta["creators"].first).to eq("familyName" => "Ollomo", "givenName" => "Benjamin", "name" => "Ollomo, Benjamin", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => []) - expect(meta["titles"]).to eq([{ "title" => "Data from: A new malaria agent in African hominids." }]) + expect(meta["creators"].first).to eq( + "familyName" => "Ollomo", + "givenName" => "Benjamin", + "name" => "Ollomo, Benjamin", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + ) + expect(meta["titles"]).to eq( + [{ "title" => "Data from: A new malaria agent in African hominids." 
}], + ) expect(meta["publication_year"]).to eq("2011") expect(meta["publisher"]).to eq("Dryad Digital Repository") end @@ -372,11 +584,36 @@ meta = subject.parse_xml(string) expect(meta["doi"]).to eq("10.14454/testpub") - expect(meta["creators"]).to eq([{ "familyName" => "Smith", "givenName" => "John", "name" => "Smith, John", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => [] }, { "name" => "つまらないものですが", "nameIdentifiers" => - [{ "nameIdentifier" => "abc123", - "nameIdentifierScheme" => "ISNI" }], - "affiliation" => [] }]) - expect(meta["titles"]).to eq([{ "title" => "Właściwości rzutowań podprzestrzeniowych" }, { "title" => "Translation of Polish titles", "titleType" => "TranslatedTitle" }]) + expect(meta["creators"]).to eq( + [ + { + "familyName" => "Smith", + "givenName" => "John", + "name" => "Smith, John", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + }, + { + "name" => "つまらないものですが", + "nameIdentifiers" => [ + { + "nameIdentifier" => "abc123", "nameIdentifierScheme" => "ISNI" + }, + ], + "affiliation" => [], + }, + ], + ) + expect(meta["titles"]).to eq( + [ + { "title" => "Właściwości rzutowań podprzestrzeniowych" }, + { + "title" => "Translation of Polish titles", + "titleType" => "TranslatedTitle", + }, + ], + ) expect(meta["publication_year"]).to eq("2010") expect(meta["publisher"]).to eq("Springer") end @@ -398,11 +635,36 @@ expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "affiliation" => [{ "name" => "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland" }], "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + 
"givenName" => "Martial", + "name" => "Sankar, Martial", + "affiliation" => [ + { + "name" => + "Department of Plant Molecular Biology, University of Lausanne, Lausanne, Switzerland", + }, + ], + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("eLife Sciences Publications, Ltd") - expect(meta["container"]).to eq("firstPage" => "e01567", "identifier" => "2050-084X", "identifierType" => "ISSN", "title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "firstPage" => "e01567", + "identifier" => "2050-084X", + "identifierType" => "ISSN", + "title" => "eLife", + "type" => "Journal", + "volume" => "3", + ) end it "from bibtex" do @@ -411,11 +673,29 @@ expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("{eLife} Sciences Organisation, Ltd.") - expect(meta["container"]).to eq("identifier" => "2050-084X", "identifierType" => "ISSN", "title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "identifier" => 
"2050-084X", + "identifierType" => "ISSN", + "title" => "eLife", + "type" => "Journal", + "volume" => "3", + ) end it "from ris" do @@ -424,11 +704,27 @@ expect(meta["doi"]).to eq("10.7554/elife.01567") expect(meta["creators"].length).to eq(5) - expect(meta["creators"].first).to eq("familyName" => "Sankar", "givenName" => "Martial", "name" => "Sankar, Martial", "nameType" => "Personal", "nameIdentifiers" => [], "affiliation" => []) - expect(meta["titles"]).to eq([{ "title" => "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth" }]) + expect(meta["creators"].first).to eq( + "familyName" => "Sankar", + "givenName" => "Martial", + "name" => "Sankar, Martial", + "nameType" => "Personal", + "nameIdentifiers" => [], + "affiliation" => [], + ) + expect(meta["titles"]).to eq( + [ + { + "title" => + "Automated quantitative histology reveals vascular morphodynamics during Arabidopsis hypocotyl secondary growth", + }, + ], + ) expect(meta["publication_year"]).to eq("2014") expect(meta["publisher"]).to eq("(:unav)") - expect(meta["container"]).to eq("title" => "eLife", "type" => "Journal", "volume" => "3") + expect(meta["container"]).to eq( + "title" => "eLife", "type" => "Journal", "volume" => "3", + ) end it "from codemeta" do @@ -437,13 +733,23 @@ expect(meta["doi"]).to eq("10.5063/f1m61h5x") expect(meta["creators"].length).to eq(3) - expect(meta["creators"].first).to eq("affiliation" => [{ "name" => "NCEAS" }], - "familyName" => "Jones", - "givenName" => "Matt", - "name" => "Jones, Matt", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-0077-4738", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal") - expect(meta["titles"]).to eq([{ "title" => "R Interface to the DataONE REST API" }]) + expect(meta["creators"].first).to eq( + "affiliation" => [{ "name" => "NCEAS" }], + "familyName" => "Jones", + "givenName" => "Matt", + "name" => "Jones, 
Matt", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-0077-4738", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + ) + expect(meta["titles"]).to eq( + [{ "title" => "R Interface to the DataONE REST API" }], + ) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("https://cran.r-project.org") end @@ -454,9 +760,19 @@ expect(meta["doi"]).to eq("10.5438/4k3m-nyvg") expect(meta["creators"].length).to eq(1) - expect(meta["creators"].first).to eq("familyName" => "Fenner", "givenName" => "Martin", "name" => "Fenner, Martin", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal") + expect(meta["creators"].first).to eq( + "familyName" => "Fenner", + "givenName" => "Martin", + "name" => "Fenner, Martin", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-1419-2405", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + ) expect(meta["titles"]).to eq([{ "title" => "Eating your own Dog Food" }]) expect(meta["publication_year"]).to eq("2016") expect(meta["publisher"]).to eq("DataCite") diff --git a/spec/concerns/facetable_spec.rb b/spec/concerns/facetable_spec.rb index f619b13eb..7ac8c9ea1 100644 --- a/spec/concerns/facetable_spec.rb +++ b/spec/concerns/facetable_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # describe 'Clients', type: :controller do diff --git a/spec/concerns/helpable_spec.rb b/spec/concerns/helpable_spec.rb index f03b1067c..b77554491 100644 --- a/spec/concerns/helpable_spec.rb +++ b/spec/concerns/helpable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Doi, vcr: true do @@ -53,31 +55,42 @@ it "should generate with seed" do str = "10.14454" - 
number = 123456 - expect(subject.generate_random_dois(str, number: number)).to eq(["10.14454/003r-j076"]) + number = 123_456 + expect(subject.generate_random_dois(str, number: number)).to eq( + %w[10.14454/003r-j076], + ) end it "should generate with seed checksum" do str = "10.14454" - number = 1234578 - expect(subject.generate_random_dois(str, number: number)).to eq(["10.14454/015n-mj18"]) + number = 1_234_578 + expect(subject.generate_random_dois(str, number: number)).to eq( + %w[10.14454/015n-mj18], + ) end it "should generate with another seed checksum" do str = "10.14454" - number = 1234579 - expect(subject.generate_random_dois(str, number: number)).to eq(["10.14454/015n-mk15"]) + number = 1_234_579 + expect(subject.generate_random_dois(str, number: number)).to eq( + %w[10.14454/015n-mk15], + ) end it "should generate with shoulder" do str = "10.14454/fk2" - number = 123456 - expect(subject.generate_random_dois(str, number: number)).to eq(["10.14454/fk2-003r-j076"]) + number = 123_456 + expect(subject.generate_random_dois(str, number: number)).to eq( + %w[10.14454/fk2-003r-j076], + ) end it "should not generate if not DOI prefix" do str = "20.5438" - expect { subject.generate_random_dois(str) }.to raise_error(IdentifierError, "No valid prefix found") + expect { subject.generate_random_dois(str) }.to raise_error( + IdentifierError, + "No valid prefix found", + ) end end @@ -95,42 +108,74 @@ it "uses *" do domains = "*" url = "https://blog.datacite.org/bla-bla" - expect(subject.match_url_with_domains(domains: domains, url: url)).to be true + expect( + subject.match_url_with_domains(domains: domains, url: url), + ).to be true end it "specific host should be in list" do domains = "blog.datacite.org,blog.example.com" url = "https://blog.datacite.org/bla-bla" - expect(subject.match_url_with_domains(domains: domains, url: url)).to be true + expect( + subject.match_url_with_domains(domains: domains, url: url), + ).to be true end it "wildcard host should be in list" 
do domains = "*.datacite.org,blog.example.com" url = "https://blog.datacite.org/bla-bla" - expect(subject.match_url_with_domains(domains: domains, url: url)).to be true + expect( + subject.match_url_with_domains(domains: domains, url: url), + ).to be true end end context "register_doi", order: :defined do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"]) } + let(:client) do + create(:client, provider: provider, symbol: ENV["MDS_USERNAME"]) + end - subject { build(:doi, doi: "10.5438/mcnv-ga6n", url: "https://blog.datacite.org/", client: client, aasm_state: "findable") } + subject do + build( + :doi, + doi: "10.5438/mcnv-ga6n", + url: "https://blog.datacite.org/", + client: client, + aasm_state: "findable", + ) + end it "should register" do - expect(subject.register_url.body).to eq("data" => { "responseCode" => 1, "handle" => "10.5438/MCNV-GA6N" }) + expect(subject.register_url.body).to eq( + "data" => { "responseCode" => 1, "handle" => "10.5438/MCNV-GA6N" }, + ) expect(subject.minted.iso8601).to be_present response = subject.get_url expect(response.body.dig("data", "responseCode")).to eq(1) - expect(response.body.dig("data", "values")).to eq([{ "index" => 1, "type" => "URL", "data" => { "format" => "string", "value" => "https://blog.datacite.org/" }, "ttl" => 86400, "timestamp" => "2020-07-26T08:55:31Z" }]) + expect(response.body.dig("data", "values")).to eq( + [ + { + "index" => 1, + "type" => "URL", + "data" => { + "format" => "string", "value" => "https://blog.datacite.org/" + }, + "ttl" => 86_400, + "timestamp" => "2020-07-26T08:55:31Z", + }, + ], + ) end context "https to http" do it "should convert" do url = "https://orcid.org/0000-0003-1419-2405" - expect(subject.https_to_http(url)).to eq("http://orcid.org/0000-0003-1419-2405") + expect(subject.https_to_http(url)).to eq( + "http://orcid.org/0000-0003-1419-2405", + ) end it "should ignore http" do @@ -155,45 +200,116 @@ 
it "should change url" do subject.url = "https://blog.datacite.org/re3data-science-europe/" - expect(subject.register_url.body).to eq("data" => { "responseCode" => 1, "handle" => "10.5438/MCNV-GA6N" }) + expect(subject.register_url.body).to eq( + "data" => { "responseCode" => 1, "handle" => "10.5438/MCNV-GA6N" }, + ) expect(subject.minted.iso8601).to be_present response = subject.get_url expect(response.body.dig("data", "responseCode")).to eq(1) - expect(response.body.dig("data", "values")).to eq([{ "index" => 1, "type" => "URL", "data" => { "format" => "string", "value" => "https://blog.datacite.org/re3data-science-europe/" }, "ttl" => 86400, "timestamp" => "2020-07-26T08:55:35Z" }]) + expect(response.body.dig("data", "values")).to eq( + [ + { + "index" => 1, + "type" => "URL", + "data" => { + "format" => "string", + "value" => "https://blog.datacite.org/re3data-science-europe/", + }, + "ttl" => 86_400, + "timestamp" => "2020-07-26T08:55:35Z", + }, + ], + ) end it "draft doi" do - subject = build(:doi, doi: "10.5438/mcnv-ga6n", url: "https://blog.datacite.org/", client: client, aasm_state: "draft") - expect { subject.register_url }.to raise_error(ActionController::BadRequest, "DOI is not registered or findable.") + subject = + build( + :doi, + doi: "10.5438/mcnv-ga6n", + url: "https://blog.datacite.org/", + client: client, + aasm_state: "draft", + ) + expect { subject.register_url }.to raise_error( + ActionController::BadRequest, + "DOI is not registered or findable.", + ) end it "missing username" do - subject = build(:doi, doi: "10.5438/mcnv-ga6n", url: "https://blog.datacite.org/re3data-science-europe/", client: nil, aasm_state: "findable") - expect { subject.register_url }.to raise_error(ActionController::BadRequest, "[Handle] Error updating DOI 10.5438/MCNV-GA6N: client ID missing.") + subject = + build( + :doi, + doi: "10.5438/mcnv-ga6n", + url: "https://blog.datacite.org/re3data-science-europe/", + client: nil, + aasm_state: "findable", + ) + expect { 
subject.register_url }.to raise_error( + ActionController::BadRequest, + "[Handle] Error updating DOI 10.5438/MCNV-GA6N: client ID missing.", + ) end it "server not responsible" do - subject = build(:doi, doi: "10.1371/journal.pbio.2001414", url: "https://journals.plos.org/plosbiology/article?id=10.1371/journal.pbio.2001414", client: client, aasm_state: "findable") - expect(subject.register_url.body).to eq("errors" => [{ "status" => 400, "title" => { "responseCode" => 301, "message" => "That prefix doesn't live here", "handle" => "10.1371/JOURNAL.PBIO.2001414" } }]) + subject = + build( + :doi, + doi: "10.1371/journal.pbio.2001414", + url: + "https://journals.plos.org/plosbiology/article?id=10.1371/journal.pbio.2001414", + client: client, + aasm_state: "findable", + ) + expect(subject.register_url.body).to eq( + "errors" => [ + { + "status" => 400, + "title" => { + "responseCode" => 301, + "message" => "That prefix doesn't live here", + "handle" => "10.1371/JOURNAL.PBIO.2001414", + }, + }, + ], + ) # expect { subject.register_url }.to raise_error(ActionController::BadRequest, "No valid prefix found") end end context "get_dois" do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) } + let(:client) do + create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) + end it "should get dois" do - options = { prefix: "10.5438", username: client.symbol, password: client.password, role_id: "client_admin" } + options = { + prefix: "10.5438", + username: client.symbol, + password: client.password, + role_id: "client_admin", + } dois = Doi.get_dois(options) expect(dois.length).to eq(446) expect(dois.first).to eq("10.5438/0000-00SS") end it "should handle zero dois" do - options = { prefix: "10.70001", username: client.symbol, password: client.password, role_id: "client_admin" } + options = { + prefix: 
"10.70001", + username: client.symbol, + password: client.password, + role_id: "client_admin", + } dois = Doi.get_dois(options) expect(dois.length).to eq(0) end diff --git a/spec/concerns/indexable_spec.rb b/spec/concerns/indexable_spec.rb index 3fcb3a05d..84d665073 100644 --- a/spec/concerns/indexable_spec.rb +++ b/spec/concerns/indexable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe "Indexable", vcr: true do @@ -69,7 +71,15 @@ end context "doi" do - let!(:doi) { create(:doi, titles: { title: "Soil investigations" }, publisher: "Pangaea", descriptions: { description: "this is a description" }, aasm_state: "findable") } + let!(:doi) do + create( + :doi, + titles: { title: "Soil investigations" }, + publisher: "Pangaea", + descriptions: { description: "this is a description" }, + aasm_state: "findable", + ) + end let!(:dois) { create_list(:doi, 3, aasm_state: "findable") } before do @@ -133,7 +143,11 @@ expect(response.results.to_a.length).to eq(2) # Move onto next based on scroll_id - response = Doi.query(nil, page: { size: 1, scroll: "1m" }, scroll_id: response.scroll_id) + response = + Doi.query( + nil, + page: { size: 1, scroll: "1m" }, scroll_id: response.scroll_id, + ) expect(response.results.to_a.length).to eq(2) end diff --git a/spec/concerns/mailable_spec.rb b/spec/concerns/mailable_spec.rb index aa588794b..52ffefed0 100644 --- a/spec/concerns/mailable_spec.rb +++ b/spec/concerns/mailable_spec.rb @@ -1,9 +1,24 @@ +# frozen_string_literal: true + require "rails_helper" describe "Mailable", type: :model, vcr: true do let(:token) { User.generate_token } - let(:provider) { create(:provider, symbol: "DATACITE", name: "DataCite", system_email: "test@datacite.org") } - let(:client) { create(:client, symbol: "DATACITE.DATACITE", name: "DataCite Repository", system_email: "test@datacite.org", provider: provider) } + let(:provider) do + create( + :provider, + symbol: "DATACITE", name: "DataCite", system_email: 
"test@datacite.org", + ) + end + let(:client) do + create( + :client, + symbol: "DATACITE.DATACITE", + name: "DataCite Repository", + system_email: "test@datacite.org", + provider: provider, + ) + end let(:title) { "DataCite Fabrica" } it "send_welcome_email" do @@ -34,7 +49,15 @@ template = "users/welcome.text.erb" url = ENV["BRACCO_URL"] + "?jwt=" + token reset_url = ENV["BRACCO_URL"] + "/reset" - text = User.format_message_text(template: template, title: title, contact_name: client.name, name: client.symbol, url: url, reset_url: reset_url) + text = + User.format_message_text( + template: template, + title: title, + contact_name: client.name, + name: client.symbol, + url: url, + reset_url: reset_url, + ) line = text.split("\n").first expect(line).to eq("Dear #{client.name},") end @@ -43,36 +66,58 @@ template = "users/welcome.html.erb" url = ENV["BRACCO_URL"] + "?jwt=" + token reset_url = ENV["BRACCO_URL"] + "/reset" - html = User.format_message_html(template: template, title: title, contact_name: client.name, name: client.symbol, url: url, reset_url: reset_url) + html = + User.format_message_html( + template: template, + title: title, + contact_name: client.name, + name: client.symbol, + url: url, + reset_url: reset_url, + ) line = html.split("\n")[41] - expect(line.strip).to eq("

Dear #{client.name},

") + expect(line.strip).to eq( + "

Dear #{ + client.name + },

", + ) end it "send email message" do text = <<~BODY - Dear #{client.name}, + Dear #{ + client.name + }, - Someone has requested a login link for the DataCite Fabrica '#{client.name}' account. + Someone has requested a login link for the DataCite Fabrica '#{ + client.name + }' account. - You can change your password with the following link: + You can change your password with the following link: - TEST + TEST - This link is valid for 48 hours. + This link is valid for 48 hours. - King regards, + King regards, - DataCite Support + DataCite Support BODY subj = title + ": Password Reset Request" - response = User.send_email_message(name: client.name, email: client.system_email, subject: subj, text: text) + response = + User.send_email_message( + name: client.name, + email: client.system_email, + subject: subj, + text: text, + ) expect(response[:status]).to eq(200) expect(response[:message]).to eq("Queued. Thank you.") end context "send_notification_to_slack" do it "succeeds" do - text = "Using system email #{client.system_email}." + text = "Using system email #{client.system_email}." options = { title: "TEST: new client account #{client.symbol} created." } expect(Client.send_notification_to_slack(text, options)).to eq("ok") end @@ -80,8 +125,10 @@ context "send_notification_to_slack provider" do it "succeeds" do - text = "Using system email #{provider.system_email}." - options = { title: "TEST: new provider account #{provider.symbol} created." } + text = "Using system email #{provider.system_email}." 
+ options = { + title: "TEST: new provider account #{provider.symbol} created.", + } expect(Client.send_notification_to_slack(text, options)).to eq("ok") end end diff --git a/spec/concerns/modelable_spec.rb b/spec/concerns/modelable_spec.rb index 8edbde319..53eb81a5c 100644 --- a/spec/concerns/modelable_spec.rb +++ b/spec/concerns/modelable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Person, vcr: true do diff --git a/spec/concerns/paginatable_spec.rb b/spec/concerns/paginatable_spec.rb index ea2306ce6..2f3d184ed 100644 --- a/spec/concerns/paginatable_spec.rb +++ b/spec/concerns/paginatable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe "Dois", type: :controller do @@ -30,21 +32,30 @@ it "page size too high" do params = ActionController::Parameters.new(page: { size: "1001" }) - expect(subject.page_from_params(params)).to eq(number: 1, size: 1000) + expect(subject.page_from_params(params)).to eq(number: 1, size: 1_000) end it "page cursor" do - params = ActionController::Parameters.new(page: { cursor: "MTMwMjUyMTAxNjAwMCwxMC40MTIyLzEuMTAwMDAwMDAyMg" }) - expect(subject.page_from_params(params)).to eq(cursor: ["1302521016000", "10.4122/1.1000000022"], number: 1, size: 25) + params = + ActionController::Parameters.new( + page: { cursor: "MTMwMjUyMTAxNjAwMCwxMC40MTIyLzEuMTAwMDAwMDAyMg" }, + ) + expect(subject.page_from_params(params)).to eq( + cursor: %w[1302521016000 10.4122/1.1000000022], number: 1, size: 25, + ) end it "page invalid cursor" do params = ActionController::Parameters.new(page: { cursor: "A" }) - expect(subject.page_from_params(params)).to eq(cursor: [], number: 1, size: 25) + expect(subject.page_from_params(params)).to eq( + cursor: [], number: 1, size: 25, + ) end it "page empty cursor" do params = ActionController::Parameters.new(page: { cursor: nil }) - expect(subject.page_from_params(params)).to eq(cursor: [], number: 1, size: 25) + 
expect(subject.page_from_params(params)).to eq( + cursor: [], number: 1, size: 25, + ) end end diff --git a/spec/concerns/passwordable_spec.rb b/spec/concerns/passwordable_spec.rb index e85db58aa..a909b3619 100644 --- a/spec/concerns/passwordable_spec.rb +++ b/spec/concerns/passwordable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Provider, type: :model do diff --git a/spec/concerns/wikidatable_spec.rb b/spec/concerns/wikidatable_spec.rb index f0034396f..20ae10fe4 100644 --- a/spec/concerns/wikidatable_spec.rb +++ b/spec/concerns/wikidatable_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe "Organization", vcr: true do @@ -13,7 +15,9 @@ expect(organization.name).to eq("University of Cambridge") expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end end @@ -23,12 +27,22 @@ id = "Q35794" response = subject.fetch_wikidata_by_id(id) - expect(response.dig("data", "entities", id, "labels", "en", "value")).to eq("University of Cambridge") - expect(response.dig("data", "entities", id, "descriptions", "en", "value")).to eq("collegiate public research university in Cambridge, England, United Kingdom") + expect( + response.dig("data", "entities", id, "labels", "en", "value"), + ).to eq("University of Cambridge") + expect( + response.dig("data", "entities", id, "descriptions", "en", "value"), + ).to eq( + "collegiate public research university in Cambridge, England, United Kingdom", + ) claims = response.dig("data", "entities", id, "claims") || {} - expect(claims.dig("P2002", 0, "mainsnak", "datavalue", "value")).to eq("Cambridge_Uni") - expect(claims.dig("P571", 0, "mainsnak", 
"datavalue", "value", "time")).to eq("+1209-01-01T00:00:00Z") + expect(claims.dig("P2002", 0, "mainsnak", "datavalue", "value")).to eq( + "Cambridge_Uni", + ) + expect( + claims.dig("P571", 0, "mainsnak", "datavalue", "value", "time"), + ).to eq("+1209-01-01T00:00:00Z") end end @@ -42,7 +56,9 @@ expect(organization.name).to eq("University of Cambridge") expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end end @@ -58,23 +74,36 @@ employment = subject.get_employments(employments) response = subject.wikidata_query(employment) - expect(response).to eq([{ "organization_id" => "https://grid.ac/institutes/grid.475826.a", - "organization_name" => "DataCite", - "role_title" => "Technical Director", - "start_date" => "2015-08-01T00:00:00Z" }, - { "end_date" => "2017-05-01T00:00:00Z", - "organization_id" => "https://grid.ac/institutes/grid.10423.34", - "organization_name" => "Hannover Medical School", - "role_title" => "Clinical Fellow in Hematology and Oncology", - "start_date" => "2005-11-01T00:00:00Z" }, - { "end_date" => "2015-07-01T00:00:00Z", - "organization_name" => "Public Library of Science", - "role_title" => "Technical lead article-level metrics project (contractor)", - "start_date" => "2012-04-01T00:00:00Z" }, - { "end_date" => "2005-10-01T00:00:00Z", - "organization_name" => "Charité Universitätsmedizin Berlin", - "role_title" => "Resident in Internal Medicine", - "start_date" => "1998-09-01T00:00:00Z" }]) + expect(response).to eq( + [ + { + "organization_id" => "https://grid.ac/institutes/grid.475826.a", + "organization_name" => "DataCite", + "role_title" => "Technical Director", + "start_date" => "2015-08-01T00:00:00Z", + }, + { + "end_date" => 
"2017-05-01T00:00:00Z", + "organization_id" => "https://grid.ac/institutes/grid.10423.34", + "organization_name" => "Hannover Medical School", + "role_title" => "Clinical Fellow in Hematology and Oncology", + "start_date" => "2005-11-01T00:00:00Z", + }, + { + "end_date" => "2015-07-01T00:00:00Z", + "organization_name" => "Public Library of Science", + "role_title" => + "Technical lead article-level metrics project (contractor)", + "start_date" => "2012-04-01T00:00:00Z", + }, + { + "end_date" => "2005-10-01T00:00:00Z", + "organization_name" => "Charité Universitätsmedizin Berlin", + "role_title" => "Resident in Internal Medicine", + "start_date" => "1998-09-01T00:00:00Z", + }, + ], + ) end it "empty" do diff --git a/spec/controllers/clients_controller_spec.rb b/spec/controllers/clients_controller_spec.rb index 67faca83f..d5fbaddef 100644 --- a/spec/controllers/clients_controller_spec.rb +++ b/spec/controllers/clients_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # RSpec.describe ClientsController, type: :controller do diff --git a/spec/controllers/dois_controller_spec.rb b/spec/controllers/dois_controller_spec.rb index a21012699..97029e559 100644 --- a/spec/controllers/dois_controller_spec.rb +++ b/spec/controllers/dois_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # # This spec was generated by rspec-rails when you ran the scaffold generator. diff --git a/spec/controllers/media_controller_spec.rb b/spec/controllers/media_controller_spec.rb index f85b599ee..a129b717f 100644 --- a/spec/controllers/media_controller_spec.rb +++ b/spec/controllers/media_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # # This spec was generated by rspec-rails when you ran the scaffold generator. 
diff --git a/spec/controllers/metadata_controller_spec.rb b/spec/controllers/metadata_controller_spec.rb index 7bbc87621..81f808fa0 100644 --- a/spec/controllers/metadata_controller_spec.rb +++ b/spec/controllers/metadata_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # # This spec was generated by rspec-rails when you ran the scaffold generator. diff --git a/spec/controllers/prefixes_controller_spec.rb b/spec/controllers/prefixes_controller_spec.rb index c794aae0b..3fa68e62f 100644 --- a/spec/controllers/prefixes_controller_spec.rb +++ b/spec/controllers/prefixes_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # # This spec was generated by rspec-rails when you ran the scaffold generator. diff --git a/spec/controllers/providers_controller_spec.rb b/spec/controllers/providers_controller_spec.rb index 38b7c2d29..286623ec9 100644 --- a/spec/controllers/providers_controller_spec.rb +++ b/spec/controllers/providers_controller_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # # RSpec.describe ProvidersController, type: :controller do diff --git a/spec/factories/default.rb b/spec/factories/default.rb index d7e2f9e28..971965ecf 100644 --- a/spec/factories/default.rb +++ b/spec/factories/default.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "faker" FactoryBot.define do @@ -32,7 +34,12 @@ orcid_token { nil } end - initialize_with { User.new(User.generate_alb_token(uid: uid, role_id: role_id), type: "oidc") } + initialize_with do + User.new( + User.generate_alb_token(uid: uid, role_id: role_id), + type: "oidc", + ) + end end factory :client do @@ -144,61 +151,27 @@ ] end titles do - [ - { - "title": "Data from: A new malaria agent in African hominids.", - }, - ] + [{ "title": "Data from: A new malaria agent in African hominids." 
}] end descriptions do - [ - { - "description": "Data from: A new malaria agent in African hominids.", - }, - ] + [{ "description": "Data from: A new malaria agent in African hominids." }] end publisher { "Dryad Digital Repository" } subjects do [ - { - "subject": "Phylogeny", - }, - { - "subject": "Malaria", - }, - { - "subject": "Parasites", - }, - { - "subject": "Taxonomy", - }, - { - "subject": "Mitochondrial genome", - }, - { - "subject": "Africa", - }, - { - "subject": "Plasmodium", - }, - ] - end - dates do - [ - { - "date": "2011", - "dateType": "Issued", - }, + { "subject": "Phylogeny" }, + { "subject": "Malaria" }, + { "subject": "Parasites" }, + { "subject": "Taxonomy" }, + { "subject": "Mitochondrial genome" }, + { "subject": "Africa" }, + { "subject": "Plasmodium" }, ] end - publication_year { 2011 } + dates { [{ "date": "2011", "dateType": "Issued" }] } + publication_year { 2_011 } identifiers do - [ - { - "identifierType": "publisher ID", - "identifier": "pk-1234", - }, - ] + [{ "identifierType": "publisher ID", "identifier": "pk-1234" }] end version { "1" } rights_list do @@ -207,7 +180,8 @@ "rights" => "Creative Commons Zero v1.0 Universal", "rightsIdentifier" => "cc0-1.0", "rightsIdentifierScheme" => "SPDX", - "rightsUri" => "https://creativecommons.org/publicdomain/zero/1.0/legalcode", + "rightsUri" => + "https://creativecommons.org/publicdomain/zero/1.0/legalcode", "schemeUri" => "https://spdx.org/licenses/", }, ] @@ -365,16 +339,18 @@ sequence(:subj_id) { |n| "http://www.citeulike.org/user/dbogartoit/#{n}" } obj_id { "http://doi.org/10.1371/journal.pmed.0030186" } subj do - { "@id" => "http://www.citeulike.org/user/dbogartoit", + { + "@id" => "http://www.citeulike.org/user/dbogartoit", "@type" => "CreativeWork", "uid" => "http://www.citeulike.org/user/dbogartoit", "author" => [{ "given" => "dbogartoit" }], "name" => "CiteULike bookmarks for user dbogartoit", "publisher" => "CiteULike", "datePublished" => "2006-06-13T16:14:19Z", - "url" => 
"http://www.citeulike.org/user/dbogartoit" } + "url" => "http://www.citeulike.org/user/dbogartoit", + } end - obj {} + obj { } relation_type_id { "bookmarks" } updated_at { Time.zone.now } occurred_at { Time.zone.now } @@ -383,7 +359,12 @@ source_id { "datacite_related" } source_token { "datacite_related_123" } sequence(:subj_id) { |n| "http://doi.org/10.5061/DRYAD.47SD5e/#{n}" } - subj { { "date_published" => "2006-06-13T16:14:19Z", "registrant_id" => "datacite.datacite" } } + subj do + { + "date_published" => "2006-06-13T16:14:19Z", + "registrant_id" => "datacite.datacite", + } + end obj_id { "http://doi.org/10.5061/DRYAD.47SD5/1" } relation_type_id { "references" } end @@ -462,7 +443,9 @@ source_id { "datacite-usage" } source_token { "5348967fhdjksr3wyui325" } total { 25 } - sequence(:subj_id) { |_n| "https://api.test.datacite.org/report/#{SecureRandom.uuid}" } + sequence(:subj_id) do |_n| + "https://api.test.datacite.org/report/#{SecureRandom.uuid}" + end subj { { "datePublished" => "2006-06-13T16:14:19Z" } } obj { { "date_published" => "2007-06-13T16:14:19Z" } } obj_id { "http://doi.org/10.5061/DRYAD.47SD5/1" } @@ -474,7 +457,9 @@ source_id { "datacite-usage" } source_token { "5348967fhdjksr3wyui325" } total { 10 } - sequence(:subj_id) { |_n| "https://api.test.datacite.org/report/#{SecureRandom.uuid}" } + sequence(:subj_id) do |_n| + "https://api.test.datacite.org/report/#{SecureRandom.uuid}" + end subj { { "datePublished" => "2006-06-13T16:14:19Z" } } obj { { "date_published" => "2007-06-13T16:14:19Z" } } obj_id { "http://doi.org/10.5061/DRYAD.47SD5/1" } @@ -486,9 +471,11 @@ source_id { "datacite-usage" } source_token { "5348967fhdjksr3wyui325" } total { rand(1..100).to_int } - sequence(:subj_id) { |_n| "https://api.test.datacite.org/report/#{SecureRandom.uuid}" } + sequence(:subj_id) do |_n| + "https://api.test.datacite.org/report/#{SecureRandom.uuid}" + end subj { { "datePublished" => "2006-06-13T16:14:19Z" } } - obj {} + obj { } obj_id { 
"http://doi.org/10.5061/DRYAD.47SD5/1" } relation_type_id { "unique-dataset-investigations-regular" } occurred_at { "2015-06-13T16:14:19Z" } @@ -498,7 +485,9 @@ source_id { "datacite-usage" } source_token { "5348967fhdjksr3wyui325" } total { rand(1..100).to_int } - sequence(:subj_id) { |_n| "https://api.test.datacite.org/report/#{SecureRandom.uuid}" } + sequence(:subj_id) do |_n| + "https://api.test.datacite.org/report/#{SecureRandom.uuid}" + end subj { { "datePublished" => "2006-06-13T16:14:19Z" } } obj { { "date_published" => "2007-06-13T16:14:19Z" } } obj_id { "http://doi.org/10.5061/DRYAD.47SD5/1" } diff --git a/spec/graphql/printout_spec.rb b/spec/graphql/printout_spec.rb index d830cfa7e..2c6c30cc9 100644 --- a/spec/graphql/printout_spec.rb +++ b/spec/graphql/printout_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe LupoSchema do diff --git a/spec/graphql/requests/me_type_spec.rb b/spec/graphql/requests/me_type_spec.rb index b77621d83..ce833005f 100644 --- a/spec/graphql/requests/me_type_spec.rb +++ b/spec/graphql/requests/me_type_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require "rails_helper" # describe MeType, type: :request do diff --git a/spec/graphql/types/actor_item_spec.rb b/spec/graphql/types/actor_item_spec.rb index c1d0255fd..83553dd6d 100644 --- a/spec/graphql/types/actor_item_spec.rb +++ b/spec/graphql/types/actor_item_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ActorItem do @@ -11,62 +13,72 @@ describe "find actor", vcr: true do let(:query) do - %(query { - actor(id: "https://ror.org/013meh722") { + "query { + actor(id: \"https://ror.org/013meh722\") { id type name alternateName } - }) + }" end it "returns actor information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "actor", "id")).to eq("https://ror.org/013meh722") + expect(response.dig("data", "actor", "id")).to eq( + "https://ror.org/013meh722", + ) 
expect(response.dig("data", "actor", "type")).to eq("Organization") - expect(response.dig("data", "actor", "name")).to eq("University of Cambridge") + expect(response.dig("data", "actor", "name")).to eq( + "University of Cambridge", + ) end end describe "find actor funder", vcr: true do let(:query) do - %(query { - actor(id: "https://doi.org/10.13039/501100003987") { + "query { + actor(id: \"https://doi.org/10.13039/501100003987\") { id type name alternateName } - }) + }" end it "returns actor information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "actor", "id")).to eq("https://doi.org/10.13039/501100003987") + expect(response.dig("data", "actor", "id")).to eq( + "https://doi.org/10.13039/501100003987", + ) expect(response.dig("data", "actor", "type")).to eq("Funder") - expect(response.dig("data", "actor", "name")).to eq("James Baird Fund, University of Cambridge") + expect(response.dig("data", "actor", "name")).to eq( + "James Baird Fund, University of Cambridge", + ) end end describe "find actor person", vcr: true do let(:query) do - %(query { - actor(id: "https://orcid.org/0000-0001-7701-701X") { + "query { + actor(id: \"https://orcid.org/0000-0001-7701-701X\") { id type name alternateName } - }) + }" end it "returns actor information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "actor", "id")).to eq("https://orcid.org/0000-0001-7701-701X") + expect(response.dig("data", "actor", "id")).to eq( + "https://orcid.org/0000-0001-7701-701X", + ) expect(response.dig("data", "actor", "type")).to eq("Person") expect(response.dig("data", "actor", "name")).to eq("Rory O'Bryen") expect(response.dig("data", "actor", "alternateName")).to eq([]) @@ -75,8 +87,8 @@ describe "query actors", vcr: true do let(:query) do - %(query { - actors(query: "Cambridge University") { + "query { + actors(query: \"Cambridge University\") { totalCount nodes { id @@ -85,20 +97,22 @@ alternateName } } - }) + }" end it "returns 
actor information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "actors", "totalCount")).to eq(1956171) + expect(response.dig("data", "actors", "totalCount")).to eq(1_956_171) expect(response.dig("data", "actors", "nodes").length).to eq(70) organization = response.dig("data", "actors", "nodes", 0) expect(organization.fetch("id")).to eq("https://ror.org/013meh722") expect(organization.fetch("name")).to eq("University of Cambridge") funder = response.dig("data", "actors", "nodes", 20) expect(funder.fetch("id")).to eq("https://doi.org/10.13039/501100009163") - expect(funder.fetch("name")).to eq("Centre of Latin American Studies, University of Cambridge") + expect(funder.fetch("name")).to eq( + "Centre of Latin American Studies, University of Cambridge", + ) person = response.dig("data", "actors", "nodes", 53) expect(person.fetch("id")).to eq("https://orcid.org/0000-0002-0929-8064") expect(person.fetch("name")).to eq("Dr Ahmed Izzidien") diff --git a/spec/graphql/types/address_type_spec.rb b/spec/graphql/types/address_type_spec.rb index 00519c33f..b583748d9 100644 --- a/spec/graphql/types/address_type_spec.rb +++ b/spec/graphql/types/address_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe AddressType do diff --git a/spec/graphql/types/audiovisual_type_spec.rb b/spec/graphql/types/audiovisual_type_spec.rb index 8b7d848c7..918f2b9b0 100644 --- a/spec/graphql/types/audiovisual_type_spec.rb +++ b/spec/graphql/types/audiovisual_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe AudiovisualType do diff --git a/spec/graphql/types/book_chapter_type_spec.rb b/spec/graphql/types/book_chapter_type_spec.rb index 885b2bb9c..b4fb3398f 100644 --- a/spec/graphql/types/book_chapter_type_spec.rb +++ b/spec/graphql/types/book_chapter_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe BookChapterType do @@ -9,7 +11,16 @@ end 
describe "query book chapters", elasticsearch: true do - let!(:book_chapters) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" }, aasm_state: "findable") } + let!(:book_chapters) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" + }, + aasm_state: "findable", + ) + end before do Doi.import @@ -18,14 +29,14 @@ end let(:query) do - %(query { + "query { bookChapters { totalCount nodes { id } } - }) + }" end it "returns all book chapters" do @@ -33,21 +44,46 @@ expect(response.dig("data", "bookChapters", "totalCount")).to eq(3) expect(response.dig("data", "bookChapters", "nodes").length).to eq(3) - expect(response.dig("data", "bookChapters", "nodes", 0, "id")).to eq(@dois.first.identifier) + expect(response.dig("data", "bookChapters", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) end end describe "query book chapters by person", elasticsearch: true do - let!(:book_chapters) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" }, aasm_state: "findable") } + let!(:book_chapters) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" + }, + aasm_state: "findable", + ) + end let!(:book_chapter) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "BookChapter" + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + 
"nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -56,8 +92,8 @@ end let(:query) do - %(query { - bookChapters(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + bookChapters(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -68,14 +104,16 @@ id } } - }) + }" end it "returns book chapters" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "bookChapters", "totalCount")).to eq(3) - expect(response.dig("data", "bookChapters", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "bookChapters", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "bookChapters", "nodes").length).to eq(3) # expect(response.dig("data", "bookChapters", "nodes", 0, "id")).to eq(@dois.first.identifier) end diff --git a/spec/graphql/types/book_type_spec.rb b/spec/graphql/types/book_type_spec.rb index ed6808c92..7d94e5aa5 100644 --- a/spec/graphql/types/book_type_spec.rb +++ b/spec/graphql/types/book_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe BookType do @@ -9,7 +11,14 @@ end describe "query books", elasticsearch: true do - let!(:books) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, aasm_state: "findable") } + let!(:books) do + create_list( + :doi, + 3, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, + aasm_state: "findable", + ) + end before do Doi.import @@ -18,14 +27,14 @@ end let(:query) do - %(query { + "query { books { totalCount nodes { id } } - }) + }" end it "returns all books" do @@ -33,21 +42,42 @@ expect(response.dig("data", "books", "totalCount")).to eq(3) expect(response.dig("data", "books", "nodes").length).to eq(3) - 
expect(response.dig("data", "books", "nodes", 0, "id")).to eq(@dois.first.identifier) + expect(response.dig("data", "books", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) end end describe "query books by person", elasticsearch: true do - let!(:books) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, aasm_state: "findable") } + let!(:books) do + create_list( + :doi, + 3, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, + aasm_state: "findable", + ) + end let!(:book) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Book" }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -56,8 +86,8 @@ end let(:query) do - %(query { - books(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + books(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -68,14 +98,16 @@ id } } - }) + }" end it "returns books" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "books", "totalCount")).to eq(3) - expect(response.dig("data", "books", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "books", "published")).to eq( + [{ "count" => 3, "id" => 
"2011", "title" => "2011" }], + ) expect(response.dig("data", "books", "nodes").length).to eq(3) # expect(response.dig("data", "books", "nodes", 0, "id")).to eq(@dois.first.identifier) end diff --git a/spec/graphql/types/collection_type_spec.rb b/spec/graphql/types/collection_type_spec.rb index 7fc1c7404..41a7d3da2 100644 --- a/spec/graphql/types/collection_type_spec.rb +++ b/spec/graphql/types/collection_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe CollectionType do diff --git a/spec/graphql/types/conference_paper_type.spec b/spec/graphql/types/conference_paper_type.spec index 0474278d5..157558c4c 100644 --- a/spec/graphql/types/conference_paper_type.spec +++ b/spec/graphql/types/conference_paper_type.spec @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ConferencePaperType do diff --git a/spec/graphql/types/contributor_type_spec.rb b/spec/graphql/types/contributor_type_spec.rb index 6313b0fc7..bb8b6b4aa 100644 --- a/spec/graphql/types/contributor_type_spec.rb +++ b/spec/graphql/types/contributor_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ContributorType do diff --git a/spec/graphql/types/country_type_spec.rb b/spec/graphql/types/country_type_spec.rb index 66eb36abd..9c003cac6 100644 --- a/spec/graphql/types/country_type_spec.rb +++ b/spec/graphql/types/country_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe CountryType do diff --git a/spec/graphql/types/creator_type_spec.rb b/spec/graphql/types/creator_type_spec.rb index d2659ed16..286d23741 100644 --- a/spec/graphql/types/creator_type_spec.rb +++ b/spec/graphql/types/creator_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe CreatorType do diff --git a/spec/graphql/types/data_catalog_type_spec.rb b/spec/graphql/types/data_catalog_type_spec.rb index 0d5a876cb..47ae4ee7c 100644 --- 
a/spec/graphql/types/data_catalog_type_spec.rb +++ b/spec/graphql/types/data_catalog_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataCatalogType do @@ -14,7 +16,9 @@ it { is_expected.to have_field(:citationCount).of_type("Int") } it { is_expected.to have_field(:viewCount).of_type("Int") } it { is_expected.to have_field(:downloadCount).of_type("Int") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } + it do + is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") + end end # describe "find data_catalog", elasticsearch: true, vcr: true do @@ -94,21 +98,32 @@ describe "query data_catalogs", elasticsearch: true, vcr: true do let!(:dois) { create_list(:doi, 3) } let!(:doi) do - create(:doi, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + creators: [ { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }]) + ) end before do @@ -117,8 +132,8 @@ end let(:query) do - %(query { - dataCatalogs(query: "Dataverse", first: 10, after: "OA") { + "query { + dataCatalogs(query: \"Dataverse\", first: 10, after: 
\"OA\") { totalCount pageInfo { endCursor @@ -140,24 +155,36 @@ } } } - }) + }" end it "returns data_catalog information" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dataCatalogs", "totalCount")).to eq(85) - expect(response.dig("data", "dataCatalogs", "pageInfo", "endCursor")).to eq("OQ") - expect(response.dig("data", "dataCatalogs", "pageInfo", "hasNextPage")).to eq true + expect( + response.dig("data", "dataCatalogs", "pageInfo", "endCursor"), + ).to eq("OQ") + expect( + response.dig("data", "dataCatalogs", "pageInfo", "hasNextPage"), + ).to eq true expect(response.dig("data", "dataCatalogs", "nodes").length).to eq(10) data_catalog = response.dig("data", "dataCatalogs", "nodes", 0) expect(data_catalog.fetch("id")).to eq("https://doi.org/10.17616/r3bw5r") - expect(data_catalog.fetch("name")).to eq("UCLA Social Science Data Archive Dataverse") - expect(data_catalog.fetch("alternateName")).to eq(["SSDA Dataverse\r\nUCLA Library Data Science Center"]) - expect(data_catalog.fetch("description")).to start_with("The Social Science Data Archive is still active and maintained as part of the UCLA Library") + expect(data_catalog.fetch("name")).to eq( + "UCLA Social Science Data Archive Dataverse", + ) + expect(data_catalog.fetch("alternateName")).to eq( + ["SSDA Dataverse\r\nUCLA Library Data Science Center"], + ) + expect(data_catalog.fetch("description")).to start_with( + "The Social Science Data Archive is still active and maintained as part of the UCLA Library", + ) expect(data_catalog.fetch("certificates")).to be_empty - expect(data_catalog.fetch("softwareApplication")).to eq([{ "name" => "DataVerse", "softwareVersion" => nil, "url" => nil }]) + expect(data_catalog.fetch("softwareApplication")).to eq( + [{ "name" => "DataVerse", "softwareVersion" => nil, "url" => nil }], + ) end end end diff --git a/spec/graphql/types/data_management_plan_type_spec.rb b/spec/graphql/types/data_management_plan_type_spec.rb index 33c302fe3..0bc892935 
100644 --- a/spec/graphql/types/data_management_plan_type_spec.rb +++ b/spec/graphql/types/data_management_plan_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataManagementPlanType do @@ -9,7 +11,18 @@ end describe "query data_management_plans", elasticsearch: true do - let!(:data_management_plans) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, language: "de", aasm_state: "findable") } + let!(:data_management_plans) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + language: "de", + aasm_state: "findable", + ) + end before do Doi.import @@ -18,7 +31,7 @@ end let(:query) do - %(query { + "query { dataManagementPlans { totalCount registrationAgencies { @@ -44,28 +57,54 @@ } } } - }) + }" end it "returns all data_management_plans" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dataManagementPlans", "totalCount")).to eq(2) - expect(response.dig("data", "dataManagementPlans", "languages")).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) - expect(response.dig("data", "dataManagementPlans", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) - expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq(2) - expect(response.dig("data", "dataManagementPlans", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect(response.dig("data", "dataManagementPlans", "languages")).to eq( + [{ "count" => 2, "id" => "de", "title" => "German" }], + ) + expect(response.dig("data", "dataManagementPlans", "licenses")).to eq( + [{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }], + ) + expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq( + 2, + ) + expect( + response.dig( + "data", + "dataManagementPlans", + "nodes", + 0, + "registrationAgency", + ), + ).to 
eq("id" => "datacite", "name" => "DataCite") end end - describe "query data_management_plans from an organization", elasticsearch: true, vcr: true do + describe "query data_management_plans from an organization", + elasticsearch: true, vcr: true do let!(:data_management_plans) do - create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, language: "de", aasm_state: "findable", funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100000780", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "European Commission", - }]) + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + language: "de", + aasm_state: "findable", + funding_references: [ + { + "funderIdentifier" => "https://doi.org/10.13039/501100000780", + "funderIdentifierType" => "Crossref Funder ID", + "funderName" => "European Commission", + }, + ], + ) end before do @@ -75,8 +114,8 @@ end let(:query) do - %(query { - organization(id: "https://ror.org/00k4n6c32") { + "query { + organization(id: \"https://ror.org/00k4n6c32\") { name dataManagementPlans { totalCount @@ -109,31 +148,92 @@ } } } - }) + }" end it "returns all data_management_plans" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "name")).to eq("European Commission") - expect(response.dig("data", "organization", "dataManagementPlans", "totalCount")).to eq(2) - expect(response.dig("data", "organization", "dataManagementPlans", "languages")).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) - expect(response.dig("data", "organization", "dataManagementPlans", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) - expect(response.dig("data", "organization", "dataManagementPlans", "nodes").length).to eq(2) - expect(response.dig("data", "organization", "dataManagementPlans", "nodes", 0, "registrationAgency")).to 
eq("id" => "datacite", "name" => "DataCite") - expect(response.dig("data", "organization", "dataManagementPlans", "nodes", 0, "types")).to eq("resourceType" => "Data Management Plan", "resourceTypeGeneral" => "Text", "schemaOrg" => "ScholarlyArticle") + expect(response.dig("data", "organization", "name")).to eq( + "European Commission", + ) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "totalCount", + ), + ).to eq(2) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "languages", + ), + ).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) + expect( + response.dig("data", "organization", "dataManagementPlans", "licenses"), + ).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) + expect( + response.dig("data", "organization", "dataManagementPlans", "nodes"). + length, + ).to eq(2) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "nodes", + 0, + "registrationAgency", + ), + ).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "nodes", + 0, + "types", + ), + ).to eq( + "resourceType" => "Data Management Plan", + "resourceTypeGeneral" => "Text", + "schemaOrg" => "ScholarlyArticle", + ) end end - describe "query data_management_plans from an organization as contributor name identifier", elasticsearch: true, vcr: true do + describe "query data_management_plans from an organization as contributor name identifier", + elasticsearch: true, vcr: true do let!(:data_management_plans) do - create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, language: "de", aasm_state: "findable", contributors: - [{ - "name" => "European Commission", - "contributorType" => "HostingInstitution", - "nameIdentifiers" => [{ "nameIdentifier" => "https://ror.org/00k4n6c32", "nameIdentifierScheme" => "ROR", "schemeUri" => "https://ror.org" }], - 
"nameType" => "Organizational", - }]) + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + language: "de", + aasm_state: "findable", + contributors: [ + { + "name" => "European Commission", + "contributorType" => "HostingInstitution", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://ror.org/00k4n6c32", + "nameIdentifierScheme" => "ROR", + "schemeUri" => "https://ror.org", + }, + ], + "nameType" => "Organizational", + }, + ], + ) end before do @@ -143,8 +243,8 @@ end let(:query) do - %(query { - organization(id: "https://ror.org/00k4n6c32") { + "query { + organization(id: \"https://ror.org/00k4n6c32\") { name dataManagementPlans { totalCount @@ -182,25 +282,96 @@ } } } - }) + }" end it "returns all data_management_plans" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "name")).to eq("European Commission") - expect(response.dig("data", "organization", "dataManagementPlans", "totalCount")).to eq(2) - expect(response.dig("data", "organization", "dataManagementPlans", "languages")).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) - expect(response.dig("data", "organization", "dataManagementPlans", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) - expect(response.dig("data", "organization", "dataManagementPlans", "nodes").length).to eq(2) - expect(response.dig("data", "organization", "dataManagementPlans", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") - expect(response.dig("data", "organization", "dataManagementPlans", "nodes", 0, "types")).to eq("resourceType" => "Data Management Plan", "resourceTypeGeneral" => "Text", "schemaOrg" => "ScholarlyArticle") - expect(response.dig("data", "organization", "dataManagementPlans", "nodes", 0, "contributors")).to eq([{ "contributorType" => "HostingInstitution", "id" => "https://ror.org/00k4n6c32", "name" => "European 
Commission" }]) + expect(response.dig("data", "organization", "name")).to eq( + "European Commission", + ) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "totalCount", + ), + ).to eq(2) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "languages", + ), + ).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) + expect( + response.dig("data", "organization", "dataManagementPlans", "licenses"), + ).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) + expect( + response.dig("data", "organization", "dataManagementPlans", "nodes"). + length, + ).to eq(2) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "nodes", + 0, + "registrationAgency", + ), + ).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "nodes", + 0, + "types", + ), + ).to eq( + "resourceType" => "Data Management Plan", + "resourceTypeGeneral" => "Text", + "schemaOrg" => "ScholarlyArticle", + ) + expect( + response.dig( + "data", + "organization", + "dataManagementPlans", + "nodes", + 0, + "contributors", + ), + ).to eq( + [ + { + "contributorType" => "HostingInstitution", + "id" => "https://ror.org/00k4n6c32", + "name" => "European Commission", + }, + ], + ) end end describe "query data_management_plans by language", elasticsearch: true do - let!(:data_management_plans) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, language: "de", aasm_state: "findable") } + let!(:data_management_plans) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + language: "de", + aasm_state: "findable", + ) + end before do Doi.import @@ -209,8 +380,8 @@ end let(:query) do - %(query { - dataManagementPlans(language: "de") { + "query { + dataManagementPlans(language: \"de\") { totalCount 
registrationAgencies { id @@ -244,25 +415,59 @@ } } } - }) + }" end it "returns all data_management_plans" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dataManagementPlans", "totalCount")).to eq(2) - expect(response.dig("data", "dataManagementPlans", "registrationAgencies")).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) - expect(response.dig("data", "dataManagementPlans", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) - expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq(2) - expect(response.dig("data", "dataManagementPlans", "nodes", 0, "rights")).to eq([{ "rights" => "Creative Commons Zero v1.0 Universal", - "rightsIdentifier" => "cc0-1.0", - "rightsUri" => "https://creativecommons.org/publicdomain/zero/1.0/legalcode" }]) - expect(response.dig("data", "dataManagementPlans", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig("data", "dataManagementPlans", "registrationAgencies"), + ).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "dataManagementPlans", "licenses")).to eq( + [{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }], + ) + expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq( + 2, + ) + expect( + response.dig("data", "dataManagementPlans", "nodes", 0, "rights"), + ).to eq( + [ + { + "rights" => "Creative Commons Zero v1.0 Universal", + "rightsIdentifier" => "cc0-1.0", + "rightsUri" => + "https://creativecommons.org/publicdomain/zero/1.0/legalcode", + }, + ], + ) + expect( + response.dig( + "data", + "dataManagementPlans", + "nodes", + 0, + "registrationAgency", + ), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query data_management_plans by license", elasticsearch: true do - let!(:data_management_plans) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => 
"Data Management Plan" }, language: "de", aasm_state: "findable") } + let!(:data_management_plans) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + language: "de", + aasm_state: "findable", + ) + end before do Doi.import @@ -271,8 +476,8 @@ end let(:query) do - %(query { - dataManagementPlans(license: "cc0-1.0") { + "query { + dataManagementPlans(license: \"cc0-1.0\") { totalCount registrationAgencies { id @@ -306,31 +511,70 @@ } } } - }) + }" end it "returns all data_management_plans" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dataManagementPlans", "totalCount")).to eq(2) - expect(response.dig("data", "dataManagementPlans", "registrationAgencies")).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) - expect(response.dig("data", "dataManagementPlans", "languages")).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) - expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq(2) - expect(response.dig("data", "dataManagementPlans", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig("data", "dataManagementPlans", "registrationAgencies"), + ).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "dataManagementPlans", "languages")).to eq( + [{ "count" => 2, "id" => "de", "title" => "German" }], + ) + expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq( + 2, + ) + expect( + response.dig( + "data", + "dataManagementPlans", + "nodes", + 0, + "registrationAgency", + ), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query data_management_plans by person", elasticsearch: true do - let!(:data_management_plans) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, aasm_state: "findable") } + let!(:data_management_plans) do + 
create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + aasm_state: "findable", + ) + end let!(:data_management_plan) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -339,8 +583,8 @@ end let(:query) do - %(query { - dataManagementPlans(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + dataManagementPlans(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -351,25 +595,70 @@ id } } - }) + }" end it "returns data_management_plans" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dataManagementPlans", "totalCount")).to eq(3) - expect(response.dig("data", "dataManagementPlans", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq(3) + expect(response.dig("data", "dataManagementPlans", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) + expect(response.dig("data", "dataManagementPlans", "nodes").length).to eq( + 3, + ) end end - describe "find data 
management plan with citations", elasticsearch: true, vcr: true do + describe "find data management plan with citations", + elasticsearch: true, vcr: true do let(:client) { create(:client) } - let(:doi) { create(:doi, client: client, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Data Management Plan" }, aasm_state: "findable") } - let(:source_doi) { create(:doi, client: client, types: { "resourceTypeGeneral" => "Dataset" }, aasm_state: "findable") } - let(:source_doi2) { create(:doi, client: client, types: { "resourceTypeGeneral" => "Software" }, aasm_state: "findable") } - let!(:citation_event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } - let!(:citation_event2) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi2.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } + let(:doi) do + create( + :doi, + client: client, + types: { + "resourceTypeGeneral" => "Text", + "resourceType" => "Data Management Plan", + }, + aasm_state: "findable", + ) + end + let(:source_doi) do + create( + :doi, + client: client, + types: { "resourceTypeGeneral" => "Dataset" }, + aasm_state: "findable", + ) + end + let(:source_doi2) do + create( + :doi, + client: client, + types: { "resourceTypeGeneral" => "Software" }, + aasm_state: "findable", + ) + end + let!(:citation_event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:citation_event2) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi2.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + 
) + end before do Doi.import @@ -377,15 +666,17 @@ end let(:query) do - %(query { - dataManagementPlan(id: "https://doi.org/#{doi.doi}") { + "query { + dataManagementPlan(id: \"https://doi.org/#{ + doi.doi + }\") { id partOf { nodes { id } } - citations(resourceTypeId: "Dataset") { + citations(resourceTypeId: \"Dataset\") { totalCount nodes { id @@ -393,14 +684,18 @@ } } } - }) + }" end it "returns citations" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "dataManagementPlan", "id")).to eq("https://handle.test.datacite.org/#{doi.doi.downcase}") - expect(response.dig("data", "dataManagementPlan", "citations", "totalCount")).to eq(1) + expect(response.dig("data", "dataManagementPlan", "id")).to eq( + "https://handle.test.datacite.org/#{doi.doi.downcase}", + ) + expect( + response.dig("data", "dataManagementPlan", "citations", "totalCount"), + ).to eq(1) end end end diff --git a/spec/graphql/types/dataset_type_spec.rb b/spec/graphql/types/dataset_type_spec.rb index fd148395c..1d1d6ccce 100644 --- a/spec/graphql/types/dataset_type_spec.rb +++ b/spec/graphql/types/dataset_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DatasetType do @@ -18,7 +20,7 @@ end let(:query) do - %(query { + "query { datasets(first: 20) { totalCount pageInfo { @@ -30,66 +32,108 @@ schemaOrg } } - }) + }" end it "returns all datasets" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) - expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(JSON.parse(response.dig("data", "datasets", "nodes", 0, "schemaOrg"))["@context"]).to eq("http://schema.org") - expect(JSON.parse(response.dig("data", "datasets", "nodes", 0, "schemaOrg"))["@type"]).to eq("Dataset") + expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) + expect( + JSON.parse(response.dig("data", "datasets", "nodes", 0, "schemaOrg"))[ + "@context" + ], + ).to eq("http://schema.org") + expect( + JSON.parse(response.dig("data", "datasets", "nodes", 0, "schemaOrg"))[ + "@type" + ], + ).to eq("Dataset") end end describe "query datasets by person", elasticsearch: true do let!(:datasets) do - create_list(:doi, 3, aasm_state: "findable", creators: - [{ - "nameType": "Personal", - "name": "Renaud, François", - "givenName": "François", - "familyName": "Renaud", "nameIdentifiers": - [{ - "nameIdentifier": "https://orcid.org/0000-0003-1419-2405", - "nameIdentifierScheme": "ORCID", - "schemeUri": "https://orcid.org", - }], - "affiliation": - [{ - "name": "DataCite", - "affiliationIdentifier": "https://ror.org/04wxnsj81", - "affiliationIdentifierScheme": "ROR", - }] - }, - { - "nameType": "Organizational", - "name": "Crossref", - "nameIdentifiers": - [{ - "nameIdentifier": "https://ror.org/02twcfp32", - "nameIdentifierScheme": "ROR", - "schemeUri": "https://ror.org", - }], - }]) + create_list( + :doi, + 3, + aasm_state: "findable", + creators: [ + { + "nameType": "Personal", + "name": "Renaud, François", + "givenName": "François", + "familyName": "Renaud", + "nameIdentifiers": [ + { + "nameIdentifier": "https://orcid.org/0000-0003-1419-2405", + "nameIdentifierScheme": "ORCID", + "schemeUri": "https://orcid.org", + }, + ], + 
"affiliation": [ + { + "name": "DataCite", + "affiliationIdentifier": "https://ror.org/04wxnsj81", + "affiliationIdentifierScheme": "ROR", + }, + ], + }, + { + "nameType": "Organizational", + "name": "Crossref", + "nameIdentifiers": [ + { + "nameIdentifier": "https://ror.org/02twcfp32", + "nameIdentifierScheme": "ROR", + "schemeUri": "https://ror.org", + }, + ], + }, + ], + ) end let!(:dataset) do - create(:doi, doi: "10.14454/4k3m-nyvg", url: "https://example.org", aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }], identifiers: [ - { "identifier" => "pk-1235", "identifierType" => "publisher ID" }, - { "identifier" => "https://example.org", "identifierType" => "URL" }, - { "identifier" => "10.14454/4k3m-nyvg", "identifierType" => "DOI" }, - ]) + create( + :doi, + doi: "10.14454/4k3m-nyvg", + url: "https://example.org", + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + identifiers: [ + { "identifier" => "pk-1235", "identifierType" => "publisher ID" }, + { "identifier" => "https://example.org", "identifierType" => "URL" }, + { "identifier" => "10.14454/4k3m-nyvg", "identifierType" => "DOI" }, + ], + ) end before do Doi.import @@ -98,8 +142,8 @@ end let(:query) do - %(query { - datasets(userId: "https://orcid.org/0000-0003-3484-6875") { + "query { + datasets(userId: \"https://orcid.org/0000-0003-3484-6875\") { totalCount published { id @@ -123,34 +167,51 @@ } } } - }) + }" end it "returns datasets" do response 
= LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(1) - expect(response.dig("data", "datasets", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "datasets", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) # expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois[2].uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(1) # expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "datasets", "nodes", 0, "creators")).to eq([{ "id" => "https://orcid.org/0000-0003-3484-6875", - "name" => "Garza, Kristian", - "type" => "Person" }]) - expect(response.dig("data", "datasets", "nodes", 0, "identifiers")).to eq([{ "identifier" => "pk-1235", "identifierType" => "publisher ID" }]) + expect(response.dig("data", "datasets", "nodes", 0, "creators")).to eq( + [ + { + "id" => "https://orcid.org/0000-0003-3484-6875", + "name" => "Garza, Kristian", + "type" => "Person", + }, + ], + ) + expect(response.dig("data", "datasets", "nodes", 0, "identifiers")).to eq( + [{ "identifier" => "pk-1235", "identifierType" => "publisher ID" }], + ) end end describe "query datasets by field of science", elasticsearch: true do let!(:datasets) { create_list(:doi, 3, aasm_state: "findable") } let!(:dataset) do - create(:doi, aasm_state: "findable", subjects: - [{ - "subject": "FOS: Computer and information sciences", - "schemeUri": "http://www.oecd.org/science/inno/38235147.pdf", - "subjectScheme": "Fields of Science and Technology (FOS)", - }]) + create( + :doi, + aasm_state: "findable", + subjects: [ + { + "subject": "FOS: Computer and information sciences", + 
"schemeUri": "http://www.oecd.org/science/inno/38235147.pdf", + "subjectScheme": "Fields of Science and Technology (FOS)", + }, + ], + ) end before do Doi.import @@ -158,8 +219,8 @@ end let(:query) do - %(query { - datasets(fieldOfScience: "computer_and_information_sciences") { + "query { + datasets(fieldOfScience: \"computer_and_information_sciences\") { totalCount published { id @@ -183,20 +244,49 @@ } } } - }) + }" end it "returns datasets" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(1) - expect(response.dig("data", "datasets", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "datasets", "fieldsOfScience")).to eq([{ "count" => 1, "id" => "computer_and_information_sciences", "title" => "Computer and information sciences" }]) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(dataset.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect(response.dig("data", "datasets", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) + expect(response.dig("data", "datasets", "fieldsOfScience")).to eq( + [ + { + "count" => 1, + "id" => "computer_and_information_sciences", + "title" => "Computer and information sciences", + }, + ], + ) + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(dataset.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(1) - expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq(dataset.identifier) - expect(response.dig("data", "datasets", "nodes", 0, "fieldsOfScience")).to eq([{ "id" => "computer_and_information_sciences", "name" => "Computer and information sciences" }]) + expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq( + dataset.identifier, + ) + expect( + response.dig("data", "datasets", "nodes", 0, "fieldsOfScience"), + ).to eq( + [ + { + "id" => "computer_and_information_sciences", + "name" => "Computer and information sciences", + }, + ], + ) end end @@ -205,8 +295,24 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:citation_event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } - let!(:citation_event2) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi2.doi}", relation_type_id: "is-referenced-by", occurred_at: "2016-06-13T16:14:19Z") } + let!(:citation_event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:citation_event2) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi2.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2016-06-13T16:14:19Z", + ) + end before do Doi.import @@ -216,7 +322,7 @@ end let(:query) do 
- %(query { + "query { datasets { totalCount pageInfo { @@ -230,7 +336,7 @@ year total } - citations(resourceTypeId: "Dataset") { + citations(resourceTypeId: \"Dataset\") { totalCount nodes { id @@ -239,20 +345,33 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 0, "citationCount")).to eq(2) # expect(response.dig("data", "datasets", "nodes", 0, "citationsOverTime")).to eq([{"total"=>1, "year"=>2015}, {"total"=>1, "year"=>2016}]) - expect(response.dig("data", "datasets", "nodes", 0, "citations", "totalCount")).to eq(2) - expect(response.dig("data", "datasets", "nodes", 0, "citations", "nodes").length).to eq(2) + expect( + response.dig("data", "datasets", "nodes", 0, "citations", "totalCount"), + ).to eq(2) + expect( + response.dig("data", "datasets", "nodes", 0, "citations", "nodes"). 
+ length, + ).to eq(2) # expect(response.dig("data", "datasets", "nodes", 0, "citations", "nodes", 0)).to eq("id"=>"https://handle.test.datacite.org/#{source_doi.uid}", "publicationYear"=>2011) end end @@ -262,8 +381,20 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:usage_event) { create(:event_for_datacite_usage, obj_id: "https://doi.org/#{source_doi.doi}", occurred_at: "2015-06-13T16:14:19Z") } - let!(:usage_event2) { create(:event_for_datacite_usage, obj_id: "https://doi.org/#{source_doi2.doi}", occurred_at: "2016-06-13T16:14:19Z") } + let!(:usage_event) do + create( + :event_for_datacite_usage, + obj_id: "https://doi.org/#{source_doi.doi}", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:usage_event2) do + create( + :event_for_datacite_usage, + obj_id: "https://doi.org/#{source_doi2.doi}", + occurred_at: "2016-06-13T16:14:19Z", + ) + end before do Doi.import @@ -273,7 +404,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -289,15 +420,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(2) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(2) # expect(response.dig("data", "datasets", "nodes", 0, "viewCount")).to be > 1 # expect(response.dig("data", "datasets", "nodes", 0, "viewsOverTime").length).to be >= 1 @@ -313,8 +452,22 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:target_doi) { create(:doi, aasm_state: "findable") } let(:target_doi2) { create(:doi, aasm_state: "findable") } - let!(:reference_event) { create(:event_for_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{target_doi.doi}", relation_type_id: "references") } - let!(:reference_event2) { create(:event_for_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{target_doi2.doi}", relation_type_id: "references") } + let!(:reference_event) do + create( + :event_for_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{target_doi.doi}", + relation_type_id: "references", + ) + end + let!(:reference_event2) do + create( + :event_for_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{target_doi2.doi}", + relation_type_id: "references", + ) + end before do Doi.import @@ -324,7 +477,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -343,15 +496,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 0, "referenceCount")).to eq(2) # expect(response.dig("data", "datasets", "nodes", 0, "references", "totalCount")).to eq(2) @@ -365,7 +526,13 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:target_doi) { create(:doi, client: client, aasm_state: "findable") } let!(:target_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:version_event) { create(:event_for_datacite_versions, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{target_doi.doi}") } + let!(:version_event) do + create( + :event_for_datacite_versions, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{target_doi.doi}", + ) + end before do Doi.import @@ -375,7 +542,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -394,15 +561,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 1, "versionCount")).to eq(1) # expect(response.dig("data", "datasets", "nodes", 1, "versions", "totalCount")).to eq(1) @@ -416,7 +591,13 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let!(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:part_of_events) { create(:event_for_datacite_version_of, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}") } + let!(:part_of_events) do + create( + :event_for_datacite_version_of, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + ) + end before do Doi.import @@ -426,7 +607,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -445,15 +626,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 1, "versionOfCount")).to eq(1) # expect(response.dig("data", "datasets", "nodes", 1, "versionOf", "totalCount")).to eq(1) @@ -467,7 +656,14 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:target_doi) { create(:doi, client: client, aasm_state: "findable") } let!(:target_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:part_events) { create(:event_for_datacite_parts, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{target_doi.doi}", relation_type_id: "has-part") } + let!(:part_events) do + create( + :event_for_datacite_parts, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{target_doi.doi}", + relation_type_id: "has-part", + ) + end before do Doi.import @@ -477,7 +673,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -500,15 +696,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 0, "partCount")).to eq(1) # expect(response.dig("data", "datasets", "nodes", 0, "parts", "totalCount")).to eq(1) @@ -524,7 +728,14 @@ let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let!(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:part_of_events) { create(:event_for_datacite_part_of, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-part-of") } + let!(:part_of_events) do + create( + :event_for_datacite_part_of, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-part-of", + ) + end before do Doi.import @@ -534,7 +745,7 @@ end let(:query) do - %(query { + "query { datasets { totalCount pageInfo { @@ -557,15 +768,23 @@ } } } - }) + }" end it "returns all datasets with counts" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false + expect( + Base64.urlsafe_decode64( + response.dig("data", "datasets", "pageInfo", "endCursor"), + ). + split(",", 2). 
+ last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "datasets", "pageInfo", "hasNextPage"), + ).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) # expect(response.dig("data", "datasets", "nodes", 1, "partOfCount")).to eq(1) # expect(response.dig("data", "datasets", "nodes", 1, "partOf", "totalCount")).to eq(1) diff --git a/spec/graphql/types/date_type_spec.rb b/spec/graphql/types/date_type_spec.rb index 928ccbdad..16f7ae9ab 100644 --- a/spec/graphql/types/date_type_spec.rb +++ b/spec/graphql/types/date_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DateType do diff --git a/spec/graphql/types/defined_term_type_spec.rb b/spec/graphql/types/defined_term_type_spec.rb index 9f844ca51..80de8af5a 100644 --- a/spec/graphql/types/defined_term_type_spec.rb +++ b/spec/graphql/types/defined_term_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DefinedTermType do diff --git a/spec/graphql/types/description_type_spec.rb b/spec/graphql/types/description_type_spec.rb index b18008eb0..fd8f5a939 100644 --- a/spec/graphql/types/description_type_spec.rb +++ b/spec/graphql/types/description_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DescriptionType do diff --git a/spec/graphql/types/dissertation_type_spec.rb b/spec/graphql/types/dissertation_type_spec.rb index 91c8be008..1a908f975 100644 --- a/spec/graphql/types/dissertation_type_spec.rb +++ b/spec/graphql/types/dissertation_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DissertationType do @@ -9,8 +11,26 @@ end describe "query dissertations", elasticsearch: true do - let!(:datacite_dissertations) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, language: "de", aasm_state: "findable") } - let!(:crossref_dissertations) { create_list(:doi, 2, types: { 
"resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" }, agency: "Crossref", aasm_state: "findable") } + let!(:datacite_dissertations) do + create_list( + :doi, + 2, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, + language: "de", + aasm_state: "findable", + ) + end + let!(:crossref_dissertations) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" + }, + agency: "Crossref", + aasm_state: "findable", + ) + end before do Doi.import @@ -19,8 +39,8 @@ end let(:query) do - %(query { - dissertations(registrationAgency: "datacite") { + "query { + dissertations(registrationAgency: \"datacite\") { totalCount registrationAgencies { id @@ -40,24 +60,48 @@ } } } - }) + }" end it "returns all dissertations" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dissertations", "totalCount")).to eq(2) - expect(response.dig("data", "dissertations", "registrationAgencies")).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) - expect(response.dig("data", "dissertations", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) + expect( + response.dig("data", "dissertations", "registrationAgencies"), + ).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "dissertations", "licenses")).to eq( + [{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }], + ) expect(response.dig("data", "dissertations", "nodes").length).to eq(2) # expect(response.dig("data", "dissertations", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "dissertations", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig("data", "dissertations", "nodes", 0, "registrationAgency"), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query dissertations by license", elasticsearch: true do - 
let!(:datacite_dissertations) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, aasm_state: "findable") } - let!(:crossref_dissertations) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" }, agency: "Crossref", rights_list: [], aasm_state: "findable") } + let!(:datacite_dissertations) do + create_list( + :doi, + 2, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, + aasm_state: "findable", + ) + end + let!(:crossref_dissertations) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" + }, + agency: "Crossref", + rights_list: [], + aasm_state: "findable", + ) + end before do Doi.import @@ -66,8 +110,8 @@ end let(:query) do - %(query { - dissertations(license: "cc0-1.0") { + "query { + dissertations(license: \"cc0-1.0\") { totalCount registrationAgencies { id @@ -92,27 +136,59 @@ } } } - }) + }" end it "returns all dissertations" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dissertations", "totalCount")).to eq(2) - expect(response.dig("data", "dissertations", "registrationAgencies")).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) - expect(response.dig("data", "dissertations", "licenses")).to eq([{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) + expect( + response.dig("data", "dissertations", "registrationAgencies"), + ).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "dissertations", "licenses")).to eq( + [{ "count" => 2, "id" => "cc0-1.0", "title" => "CC0-1.0" }], + ) expect(response.dig("data", "dissertations", "nodes").length).to eq(2) # expect(response.dig("data", "dissertations", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "dissertations", "nodes", 0, "rights")).to eq([{ "rights" => "Creative Commons Zero v1.0 Universal", - "rightsIdentifier" 
=> "cc0-1.0", - "rightsUri" => "https://creativecommons.org/publicdomain/zero/1.0/legalcode" }]) - expect(response.dig("data", "dissertations", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect(response.dig("data", "dissertations", "nodes", 0, "rights")).to eq( + [ + { + "rights" => "Creative Commons Zero v1.0 Universal", + "rightsIdentifier" => "cc0-1.0", + "rightsUri" => + "https://creativecommons.org/publicdomain/zero/1.0/legalcode", + }, + ], + ) + expect( + response.dig("data", "dissertations", "nodes", 0, "registrationAgency"), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query dissertations by license", elasticsearch: true do - let!(:datacite_dissertations) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, language: "de", aasm_state: "findable") } - let!(:crossref_dissertations) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" }, agency: "Crossref", rights_list: [], aasm_state: "findable") } + let!(:datacite_dissertations) do + create_list( + :doi, + 2, + types: { "resourceTypeGeneral" => "Text", "resourceType" => "Thesis" }, + language: "de", + aasm_state: "findable", + ) + end + let!(:crossref_dissertations) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" + }, + agency: "Crossref", + rights_list: [], + aasm_state: "findable", + ) + end before do Doi.import @@ -121,8 +197,8 @@ end let(:query) do - %(query { - dissertations(language: "de") { + "query { + dissertations(language: \"de\") { totalCount registrationAgencies { id @@ -146,40 +222,75 @@ } } } - }) + }" end it "returns all dissertations" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dissertations", "totalCount")).to eq(2) - expect(response.dig("data", "dissertations", "registrationAgencies")).to eq([{ "count" => 2, "id" => "datacite", "title" => 
"DataCite" }]) - expect(response.dig("data", "dissertations", "languages")).to eq([{ "count" => 2, "id" => "de", "title" => "German" }]) + expect( + response.dig("data", "dissertations", "registrationAgencies"), + ).to eq([{ "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "dissertations", "languages")).to eq( + [{ "count" => 2, "id" => "de", "title" => "German" }], + ) expect(response.dig("data", "dissertations", "nodes").length).to eq(2) # expect(response.dig("data", "dissertations", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "dissertations", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig("data", "dissertations", "nodes", 0, "registrationAgency"), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query dissertations by person", elasticsearch: true do let!(:dissertations) do - create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" }, aasm_state: "findable", contributors: - [{ - "name" => "Freie Universität Berlin", - "contributorType" => "HostingInstitution", - "nameIdentifiers" => [{ "nameIdentifier" => "https://ror.org/046ak2485", "nameIdentifierScheme" => "ROR", "schemeUri" => "https://ror.org" }], - "nameType" => "Organizational", - }]) + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" + }, + aasm_state: "findable", + contributors: [ + { + "name" => "Freie Universität Berlin", + "contributorType" => "HostingInstitution", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://ror.org/046ak2485", + "nameIdentifierScheme" => "ROR", + "schemeUri" => "https://ror.org", + }, + ], + "nameType" => "Organizational", + }, + ], + ) end let!(:dissertation) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", 
- "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Dissertation" + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -188,8 +299,8 @@ end let(:query) do - %(query { - dissertations(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + dissertations(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -198,13 +309,13 @@ } nodes { id - dataManagers: contributors(contributorType: "DataManager") { + dataManagers: contributors(contributorType: \"DataManager\") { id type name contributorType } - hostingInstitution: contributors(contributorType: "HostingInstitution") { + hostingInstitution: contributors(contributorType: \"HostingInstitution\") { id type name @@ -218,24 +329,44 @@ } } } - }) + }" end it "returns dissertations" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "dissertations", "totalCount")).to eq(3) - expect(response.dig("data", "dissertations", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "dissertations", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "dissertations", "nodes").length).to eq(3) - expect(response.dig("data", "dissertations", "nodes", 0, "dataManagers")).to eq([]) - expect(response.dig("data", "dissertations", "nodes", 0, "hostingInstitution")).to 
eq([{ "contributorType" => "HostingInstitution", - "id" => "https://ror.org/046ak2485", - "name" => "Freie Universität Berlin", - "type" => "Organization" }]) - expect(response.dig("data", "dissertations", "nodes", 0, "contributors")).to eq([{ "contributorType" => "HostingInstitution", - "id" => "https://ror.org/046ak2485", - "name" => "Freie Universität Berlin", - "type" => "Organization" }]) + expect( + response.dig("data", "dissertations", "nodes", 0, "dataManagers"), + ).to eq([]) + expect( + response.dig("data", "dissertations", "nodes", 0, "hostingInstitution"), + ).to eq( + [ + { + "contributorType" => "HostingInstitution", + "id" => "https://ror.org/046ak2485", + "name" => "Freie Universität Berlin", + "type" => "Organization", + }, + ], + ) + expect( + response.dig("data", "dissertations", "nodes", 0, "contributors"), + ).to eq( + [ + { + "contributorType" => "HostingInstitution", + "id" => "https://ror.org/046ak2485", + "name" => "Freie Universität Berlin", + "type" => "Organization", + }, + ], + ) end end end diff --git a/spec/graphql/types/doi_item_spec.rb b/spec/graphql/types/doi_item_spec.rb index e7c8dcbad..2db00a772 100644 --- a/spec/graphql/types/doi_item_spec.rb +++ b/spec/graphql/types/doi_item_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DoiItem do @@ -27,7 +29,11 @@ it { is_expected.to have_field(:url).of_type("Url") } it { is_expected.to have_field(:repository).of_type("Repository") } it { is_expected.to have_field(:member).of_type("Member") } - it { is_expected.to have_field(:registrationAgency).of_type("RegistrationAgency") } + it do + is_expected.to have_field(:registrationAgency).of_type( + "RegistrationAgency", + ) + end it { is_expected.to have_field(:formattedCitation).of_type("String") } it { is_expected.to have_field(:xml).of_type("String!") } it { is_expected.to have_field(:bibtex).of_type("String!") } @@ -41,13 +47,27 @@ it { is_expected.to have_field(:partCount).of_type("Int") } it { 
is_expected.to have_field(:partOfCount).of_type("Int") } it { is_expected.to have_field(:citationsOverTime).of_type("[YearTotal!]") } - it { is_expected.to have_field(:viewsOverTime).of_type("[YearMonthTotal!]") } - it { is_expected.to have_field(:downloadsOverTime).of_type("[YearMonthTotal!]") } - it { is_expected.to have_field(:citations).of_type("WorkConnectionWithTotal") } - it { is_expected.to have_field(:references).of_type("WorkConnectionWithTotal") } + it do + is_expected.to have_field(:viewsOverTime).of_type("[YearMonthTotal!]") + end + it do + is_expected.to have_field(:downloadsOverTime).of_type("[YearMonthTotal!]") + end + it do + is_expected.to have_field(:citations).of_type("WorkConnectionWithTotal") + end + it do + is_expected.to have_field(:references).of_type("WorkConnectionWithTotal") + end it { is_expected.to have_field(:parts).of_type("WorkConnectionWithTotal") } - it { is_expected.to have_field(:part_of).of_type("WorkConnectionWithTotal") } - it { is_expected.to have_field(:versions).of_type("WorkConnectionWithTotal") } - it { is_expected.to have_field(:version_of).of_type("WorkConnectionWithTotal") } + it do + is_expected.to have_field(:part_of).of_type("WorkConnectionWithTotal") + end + it do + is_expected.to have_field(:versions).of_type("WorkConnectionWithTotal") + end + it do + is_expected.to have_field(:version_of).of_type("WorkConnectionWithTotal") + end end end diff --git a/spec/graphql/types/employment_type_spec.rb b/spec/graphql/types/employment_type_spec.rb index c0b61ca90..8ec989cd7 100644 --- a/spec/graphql/types/employment_type_spec.rb +++ b/spec/graphql/types/employment_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe EmploymentType do diff --git a/spec/graphql/types/event_type_spec.rb b/spec/graphql/types/event_type_spec.rb index 3e893f7fc..3252a2dc4 100644 --- a/spec/graphql/types/event_type_spec.rb +++ b/spec/graphql/types/event_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: 
true + require "rails_helper" describe EventType do diff --git a/spec/graphql/types/facet_type_spec.rb b/spec/graphql/types/facet_type_spec.rb index 58be566b1..58674fcf3 100644 --- a/spec/graphql/types/facet_type_spec.rb +++ b/spec/graphql/types/facet_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe FacetType do diff --git a/spec/graphql/types/funder_type_spec.rb b/spec/graphql/types/funder_type_spec.rb index 293dc09da..d9ff18584 100644 --- a/spec/graphql/types/funder_type_spec.rb +++ b/spec/graphql/types/funder_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe FunderType do @@ -11,26 +13,58 @@ it { is_expected.to have_field(:citationCount).of_type("Int") } it { is_expected.to have_field(:viewCount).of_type("Int") } it { is_expected.to have_field(:downloadCount).of_type("Int") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal") } + it do + is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") + end + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal", + ) + end it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal") } end describe "find funder", elasticsearch: true, vcr: true do let(:client) { create(:client) } let(:doi) do - create(:doi, client: client, aasm_state: "findable", funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100009053", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "The Wellcome Trust DBT India Alliance", - }]) + create( + :doi, + client: client, + aasm_state: "findable", + funding_references: [ + { + "funderIdentifier" => 
"https://doi.org/10.13039/501100009053", + "funderIdentifierType" => "Crossref Funder ID", + "funderName" => "The Wellcome Trust DBT India Alliance", + }, + ], + ) end let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:citation_event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } - let!(:citation_event2) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi2.doi}", relation_type_id: "is-referenced-by", occurred_at: "2016-06-13T16:14:19Z") } + let!(:citation_event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:citation_event2) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi2.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2016-06-13T16:14:19Z", + ) + end before do Client.import @@ -40,8 +74,8 @@ end let(:query) do - %(query { - funder(id: "https://doi.org/10.13039/501100009053") { + "query { + funder(id: \"https://doi.org/10.13039/501100009053\") { id name alternateName @@ -73,38 +107,61 @@ } } } - }) + }" end it "returns funder information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "funder", "id")).to eq("https://doi.org/10.13039/501100009053") - expect(response.dig("data", "funder", "name")).to eq("The Wellcome Trust DBT India Alliance") + expect(response.dig("data", "funder", "id")).to eq( + "https://doi.org/10.13039/501100009053", + ) + expect(response.dig("data", "funder", "name")).to eq( + "The Wellcome Trust DBT India Alliance", + ) 
expect(response.dig("data", "funder", "citationCount")).to eq(2) expect(response.dig("data", "funder", "viewCount")).to eq(0) expect(response.dig("data", "funder", "downloadCount")).to eq(0) expect(response.dig("data", "funder", "works", "totalCount")).to eq(1) - expect(Base64.urlsafe_decode64(response.dig("data", "funder", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(doi.uid) - expect(response.dig("data", "funder", "works", "pageInfo", "hasNextPage")).to be false - expect(response.dig("data", "funder", "works", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "funder", "works", "resourceTypes")).to eq([{ "count" => 1, "id" => "dataset", "title" => "Dataset" }]) + expect( + Base64.urlsafe_decode64( + response.dig("data", "funder", "works", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(doi.uid) + expect( + response.dig("data", "funder", "works", "pageInfo", "hasNextPage"), + ).to be false + expect(response.dig("data", "funder", "works", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) + expect(response.dig("data", "funder", "works", "resourceTypes")).to eq( + [{ "count" => 1, "id" => "dataset", "title" => "Dataset" }], + ) expect(response.dig("data", "funder", "works", "nodes").length).to eq(1) work = response.dig("data", "funder", "works", "nodes", 0) - expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(work.dig("titles", 0, "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) expect(work.dig("citationCount")).to eq(2) end end describe "query funders", elasticsearch: true, vcr: true do let!(:dois) do - create_list(:doi, 3, funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/100010269", - "funderIdentifierType" => "DOI", - }]) + create_list( + :doi, + 3, + funding_references: [ + { + "funderIdentifier" => 
"https://doi.org/10.13039/100010269", + "funderIdentifierType" => "DOI", + }, + ], + ) end before do @@ -113,8 +170,8 @@ end let(:query) do - %(query { - funders(query: "Wellcome Trust", first: 30, after: "Mg") { + "query { + funders(query: \"Wellcome Trust\", first: 30, after: \"Mg\") { totalCount pageInfo { endCursor @@ -134,21 +191,25 @@ } } } - }) + }" end it "returns funder information" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "funders", "totalCount")).to eq(4) - expect(response.dig("data", "funders", "pageInfo", "endCursor")).to eq("Mw") + expect(response.dig("data", "funders", "pageInfo", "endCursor")).to eq( + "Mw", + ) # expect(response.dig("data", "funders", "pageInfo", "hasNextPage")).to eq(false) expect(response.dig("data", "funders", "nodes").length).to eq(2) funder = response.dig("data", "funders", "nodes", 0) expect(funder.fetch("id")).to eq("https://doi.org/10.13039/100010269") expect(funder.fetch("name")).to eq("Wellcome Trust") - expect(funder.fetch("alternateName")).to eq(["The Wellcome Trust", "WT", "Wellcome"]) + expect(funder.fetch("alternateName")).to eq( + ["The Wellcome Trust", "WT", "Wellcome"], + ) # expect(funder.dig("works", "totalCount")).to eq(3) # expect(funder.dig("works", "years")).to eq([{"count"=>3, "title"=>"2011"}]) end @@ -156,8 +217,8 @@ describe "query funders national", elasticsearch: true, vcr: true do let(:query) do - %(query { - funders(query: "national", first: 10, after: "OA") { + "query { + funders(query: \"national\", first: 10, after: \"OA\") { totalCount pageInfo { endCursor @@ -172,21 +233,27 @@ } } } - }) + }" end it "returns funder information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "funders", "totalCount")).to eq(1144) - expect(response.dig("data", "funders", "pageInfo", "endCursor")).to eq("OQ") - expect(response.dig("data", "funders", "pageInfo", "hasNextPage")).to eq(true) + expect(response.dig("data", "funders", "totalCount")).to 
eq(1_144) + expect(response.dig("data", "funders", "pageInfo", "endCursor")).to eq( + "OQ", + ) + expect(response.dig("data", "funders", "pageInfo", "hasNextPage")).to eq( + true, + ) expect(response.dig("data", "funders", "nodes").length).to eq(10) funder = response.dig("data", "funders", "nodes", 0) expect(funder.fetch("id")).to eq("https://doi.org/10.13039/100000051") - expect(funder.fetch("name")).to eq("National Human Genome Research Institute") - expect(funder.fetch("alternateName")).to eq(["NHGRI"]) + expect(funder.fetch("name")).to eq( + "National Human Genome Research Institute", + ) + expect(funder.fetch("alternateName")).to eq(%w[NHGRI]) expect(funder.dig("address", "country")).to eq("United States") end end diff --git a/spec/graphql/types/funding_type_spec.rb b/spec/graphql/types/funding_type_spec.rb index fcb11548f..4c3a7ccb8 100644 --- a/spec/graphql/types/funding_type_spec.rb +++ b/spec/graphql/types/funding_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe FundingType do diff --git a/spec/graphql/types/identifier_type_spec.rb b/spec/graphql/types/identifier_type_spec.rb index 5f9531040..fec92a650 100644 --- a/spec/graphql/types/identifier_type_spec.rb +++ b/spec/graphql/types/identifier_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe IdentifierType do diff --git a/spec/graphql/types/image_type_spec.rb b/spec/graphql/types/image_type_spec.rb index eb98e504f..019a2f9cc 100644 --- a/spec/graphql/types/image_type_spec.rb +++ b/spec/graphql/types/image_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ImageType do diff --git a/spec/graphql/types/instrument_type_spec.rb b/spec/graphql/types/instrument_type_spec.rb index 9983d30e3..743d697f9 100644 --- a/spec/graphql/types/instrument_type_spec.rb +++ b/spec/graphql/types/instrument_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe 
InstrumentType do @@ -9,7 +11,16 @@ end describe "query instruments", elasticsearch: true do - let!(:instruments) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Other", "resourceType" => "Instrument" }, aasm_state: "findable") } + let!(:instruments) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Other", "resourceType" => "Instrument" + }, + aasm_state: "findable", + ) + end before do Doi.import @@ -18,14 +29,14 @@ end let(:query) do - %(query { + "query { instruments { totalCount nodes { id } } - }) + }" end it "returns all instruments" do @@ -33,7 +44,9 @@ expect(response.dig("data", "instruments", "totalCount")).to eq(3) expect(response.dig("data", "instruments", "nodes").length).to eq(3) - expect(response.dig("data", "instruments", "nodes", 0, "id")).to eq(@dois.first.identifier) + expect(response.dig("data", "instruments", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) end end end diff --git a/spec/graphql/types/interactive_resource_type_spec.rb b/spec/graphql/types/interactive_resource_type_spec.rb index 1423573cc..4affbf7ff 100644 --- a/spec/graphql/types/interactive_resource_type_spec.rb +++ b/spec/graphql/types/interactive_resource_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe InteractiveResourceType do diff --git a/spec/graphql/types/issn_type_spec.rb b/spec/graphql/types/issn_type_spec.rb index 32f13ec78..9d9b9355f 100644 --- a/spec/graphql/types/issn_type_spec.rb +++ b/spec/graphql/types/issn_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe IssnType do diff --git a/spec/graphql/types/journal_article_type_spec.rb b/spec/graphql/types/journal_article_type_spec.rb index aa11cb18c..6c2dfe06d 100644 --- a/spec/graphql/types/journal_article_type_spec.rb +++ b/spec/graphql/types/journal_article_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe JournalArticleType do @@ -9,7 +11,16 @@ 
end describe "query journal articles", elasticsearch: true do - let!(:journal_articles) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" }, aasm_state: "findable") } + let!(:journal_articles) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" + }, + aasm_state: "findable", + ) + end before do Doi.import @@ -18,14 +29,14 @@ end let(:query) do - %(query { + "query { journalArticles { totalCount nodes { id } } - }) + }" end it "returns all journal articles" do @@ -33,21 +44,46 @@ expect(response.dig("data", "journalArticles", "totalCount")).to eq(3) expect(response.dig("data", "journalArticles", "nodes").length).to eq(3) - expect(response.dig("data", "journalArticles", "nodes", 0, "id")).to eq(@dois.first.identifier) + expect(response.dig("data", "journalArticles", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) end end describe "query journal articles by person", elasticsearch: true do - let!(:journal_articles) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" }, aasm_state: "findable") } + let!(:journal_articles) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" + }, + aasm_state: "findable", + ) + end let!(:journal_article) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "JournalArticle" + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + 
"name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -56,8 +92,8 @@ end let(:query) do - %(query { - journalArticles(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + journalArticles(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -68,14 +104,16 @@ id } } - }) + }" end it "returns journal articles" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "journalArticles", "totalCount")).to eq(3) - expect(response.dig("data", "journalArticles", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "journalArticles", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "journalArticles", "nodes").length).to eq(3) # expect(response.dig("data", "journalArticles", "nodes", 0, "id")).to eq(@dois.first.identifier) end diff --git a/spec/graphql/types/label_type_spec.rb b/spec/graphql/types/label_type_spec.rb index b5fbbd378..ee06d048e 100644 --- a/spec/graphql/types/label_type_spec.rb +++ b/spec/graphql/types/label_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe LabelType do diff --git a/spec/graphql/types/language_type_spec.rb b/spec/graphql/types/language_type_spec.rb index dbceea3ba..30c146074 100644 --- a/spec/graphql/types/language_type_spec.rb +++ b/spec/graphql/types/language_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe LanguageType do diff --git a/spec/graphql/types/me_type_spec.rb b/spec/graphql/types/me_type_spec.rb index 0e57a3ca0..0e4e8ca57 100644 --- a/spec/graphql/types/me_type_spec.rb +++ b/spec/graphql/types/me_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + 
require "rails_helper" describe MeType do @@ -11,18 +13,21 @@ describe "find current_user" do let(:query) do - %(query { + "query { me { id name } - }) + }" end it "returns current_user" do # current_user is normally set in the API using the authorization header - current_user = OpenStruct.new(uid: "0000-0001-5489-3594", name: "Josiah Carberry") - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json + current_user = + OpenStruct.new(uid: "0000-0001-5489-3594", name: "Josiah Carberry") + response = + LupoSchema.execute(query, context: { current_user: current_user }). + as_json expect(response.dig("data", "me", "id")).to eq("0000-0001-5489-3594") expect(response.dig("data", "me", "name")).to eq("Josiah Carberry") @@ -31,12 +36,12 @@ describe "find current_user not authenticated" do let(:query) do - %(query { + "query { me { id name } - }) + }" end it "not returns current_user" do diff --git a/spec/graphql/types/member_prefix_type_spec.rb b/spec/graphql/types/member_prefix_type_spec.rb index 7eeb9dea1..e1267dddb 100644 --- a/spec/graphql/types/member_prefix_type_spec.rb +++ b/spec/graphql/types/member_prefix_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe MemberPrefixType do diff --git a/spec/graphql/types/member_type_spec.rb b/spec/graphql/types/member_type_spec.rb index 09dd5a7fe..1842f3cac 100644 --- a/spec/graphql/types/member_type_spec.rb +++ b/spec/graphql/types/member_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe MemberType do @@ -17,11 +19,29 @@ it { is_expected.to have_field(:organizationType).of_type("String") } it { is_expected.to have_field(:focusArea).of_type("String") } it { is_expected.to have_field(:joined).of_type("ISO8601Date") } - it { is_expected.to have_field(:repositories).of_type("RepositoryConnectionWithTotal") } - it { is_expected.to have_field(:prefixes).of_type("MemberPrefixConnectionWithTotal") } - it { 
is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal") } + it do + is_expected.to have_field(:repositories).of_type( + "RepositoryConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:prefixes).of_type( + "MemberPrefixConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") + end + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal", + ) + end it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal") } end @@ -35,7 +55,7 @@ end let(:query) do - %(query { + "query { members(first: 5) { totalCount pageInfo { @@ -77,25 +97,51 @@ name } } - }) + }" end it "returns all members" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "members", "totalCount")).to eq(6) - expect(Base64.urlsafe_decode64(response.dig("data", "members", "pageInfo", "endCursor")).split(",").last).to eq(@members[4].uid) - expect(response.dig("data", "members", "pageInfo", "hasNextPage")).to be true + expect( + Base64.urlsafe_decode64( + response.dig("data", "members", "pageInfo", "endCursor"), + ). + split(","). 
+ last, + ).to eq(@members[4].uid) + expect( + response.dig("data", "members", "pageInfo", "hasNextPage"), + ).to be true - expect(response.dig("data", "members", "years")).to eq([{ "count" => 6, "id" => "2020", "title" => "2020" }]) - expect(response.dig("data", "members", "regions")).to eq([{ "count" => 6, "id" => "emea", "title" => "Europe, Middle East and Africa" }]) - expect(response.dig("data", "members", "memberTypes")).to eq([{ "count" => 6, "id" => "direct_member", "title" => "Direct Member" }]) + expect(response.dig("data", "members", "years")).to eq( + [{ "count" => 6, "id" => "2020", "title" => "2020" }], + ) + expect(response.dig("data", "members", "regions")).to eq( + [ + { + "count" => 6, + "id" => "emea", + "title" => "Europe, Middle East and Africa", + }, + ], + ) + expect(response.dig("data", "members", "memberTypes")).to eq( + [{ "count" => 6, "id" => "direct_member", "title" => "Direct Member" }], + ) expect(response.dig("data", "members", "organizationTypes")).to eq([]) expect(response.dig("data", "members", "focusAreas")).to eq([]) - expect(response.dig("data", "members", "nonProfitStatuses")).to eq([{ "count" => 6, "id" => "non-profit", "title" => "Non Profit" }]) + expect(response.dig("data", "members", "nonProfitStatuses")).to eq( + [{ "count" => 6, "id" => "non-profit", "title" => "Non Profit" }], + ) expect(response.dig("data", "members", "nodes").length).to eq(5) - expect(response.dig("data", "members", "nodes", 0, "id")).to eq(@members.first.uid) - expect(response.dig("data", "members", "nodes", 0, "name")).to eq(@members.first.name) + expect(response.dig("data", "members", "nodes", 0, "id")).to eq( + @members.first.uid, + ) + expect(response.dig("data", "members", "nodes", 0, "name")).to eq( + @members.first.name, + ) end end @@ -104,7 +150,9 @@ let(:client) { create(:client, provider: provider, software: "dataverse") } let!(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:prefix) { create(:prefix) } - 
let!(:provider_prefixes) { create_list(:provider_prefix, 3, provider: provider) } + let!(:provider_prefixes) do + create_list(:provider_prefix, 3, provider: provider) + end before do Provider.import @@ -113,12 +161,13 @@ Prefix.import ProviderPrefix.import sleep 3 - @provider_prefixes = ProviderPrefix.query(nil, page: { cursor: [], size: 3 }).results.to_a + @provider_prefixes = + ProviderPrefix.query(nil, page: { cursor: [], size: 3 }).results.to_a end let(:query) do - %(query { - member(id: "testc") { + "query { + member(id: \"testc\") { id name country { @@ -158,21 +207,33 @@ } } } - }) + }" end it "returns member" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "member", "id")).to eq(provider.uid) - expect(response.dig("data", "member", "memberRole")).to eq("id" => "direct_member", "name" => "Direct Member") + expect(response.dig("data", "member", "memberRole")).to eq( + "id" => "direct_member", "name" => "Direct Member", + ) expect(response.dig("data", "member", "name")).to eq("My provider") - expect(response.dig("data", "member", "country")).to eq("name" => "Germany") + expect(response.dig("data", "member", "country")).to eq( + "name" => "Germany", + ) - expect(response.dig("data", "member", "repositories", "totalCount")).to eq(1) - expect(response.dig("data", "member", "repositories", "years")).to eq([{ "count" => 1, "id" => "2020" }]) - expect(response.dig("data", "member", "repositories", "software")).to eq([{ "count" => 1, "id" => "dataverse" }]) - expect(response.dig("data", "member", "repositories", "nodes").length).to eq(1) + expect( + response.dig("data", "member", "repositories", "totalCount"), + ).to eq(1) + expect(response.dig("data", "member", "repositories", "years")).to eq( + [{ "count" => 1, "id" => "2020" }], + ) + expect(response.dig("data", "member", "repositories", "software")).to eq( + [{ "count" => 1, "id" => "dataverse" }], + ) + expect( + response.dig("data", "member", "repositories", "nodes").length, + ).to 
eq(1) repository1 = response.dig("data", "member", "repositories", "nodes", 0) expect(repository1.fetch("id")).to eq(client.uid) expect(repository1.fetch("name")).to eq(client.name) @@ -180,8 +241,12 @@ expect(repository1.dig("datasets", "totalCount")).to eq(1) expect(response.dig("data", "member", "prefixes", "totalCount")).to eq(3) - expect(response.dig("data", "member", "prefixes", "years")).to eq([{ "count" => 3, "id" => "2020" }]) - expect(response.dig("data", "member", "prefixes", "nodes").length).to eq(3) + expect(response.dig("data", "member", "prefixes", "years")).to eq( + [{ "count" => 3, "id" => "2020" }], + ) + expect(response.dig("data", "member", "prefixes", "nodes").length).to eq( + 3, + ) prefix1 = response.dig("data", "member", "prefixes", "nodes", 0) expect(prefix1.fetch("name")).to eq(@provider_prefixes.first.prefix_id) end diff --git a/spec/graphql/types/model_type_spec.rb b/spec/graphql/types/model_type_spec.rb index 7aff2d5a5..22ef3962a 100644 --- a/spec/graphql/types/model_type_spec.rb +++ b/spec/graphql/types/model_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ModelType do diff --git a/spec/graphql/types/organization_type_spec.rb b/spec/graphql/types/organization_type_spec.rb index 63a6797dc..cb3e138e5 100644 --- a/spec/graphql/types/organization_type_spec.rb +++ b/spec/graphql/types/organization_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe OrganizationType do @@ -17,39 +19,67 @@ it { is_expected.to have_field(:citationCount).of_type("Int") } it { is_expected.to have_field(:viewCount).of_type("Int") } it { is_expected.to have_field(:downloadCount).of_type("Int") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal") } + it do + is_expected.to 
have_field(:datasets).of_type("DatasetConnectionWithTotal") + end + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal", + ) + end it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal") } end describe "find organization", elasticsearch: true, vcr: true do let!(:doi) do - create(:doi, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + creators: [ { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }]) + ) end let!(:funder_doi) do - create(:doi, aasm_state: "findable", funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100000735", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "University of Cambridge", - }]) - end - let(:provider) { create(:provider, symbol: "LPSW", ror_id: "https://ror.org/013meh722") } + create( + :doi, + aasm_state: "findable", + funding_references: [ + { + "funderIdentifier" => "https://doi.org/10.13039/501100000735", + "funderIdentifierType" => "Crossref Funder ID", + 
"funderName" => "University of Cambridge", + }, + ], + ) + end + let(:provider) do + create(:provider, symbol: "LPSW", ror_id: "https://ror.org/013meh722") + end let(:client) { create(:client, provider: provider) } let!(:member_doi) { create(:doi, aasm_state: "findable", client: client) } @@ -59,8 +89,8 @@ end let(:query) do - %(query { - organization(id: "https://ror.org/013meh722") { + "query { + organization(id: \"https://ror.org/013meh722\") { id memberId memberRoleId @@ -104,63 +134,112 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/013meh722") + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/013meh722", + ) expect(response.dig("data", "organization", "memberId")).to eq("lpsw") - expect(response.dig("data", "organization", "memberRoleId")).to eq("direct_member") - expect(response.dig("data", "organization", "memberRoleName")).to eq("Direct Member") - expect(response.dig("data", "organization", "name")).to eq("University of Cambridge") - expect(response.dig("data", "organization", "alternateName")).to eq(["Cambridge University"]) - expect(response.dig("data", "organization", "wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") - expect(response.dig("data", "organization", "twitter")).to eq("Cambridge_Uni") - expect(response.dig("data", "organization", "inceptionYear")).to eq(1209) - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => 52.205277777778, "pointLongitude" => 0.11722222222222) + expect(response.dig("data", "organization", "memberRoleId")).to eq( + "direct_member", + ) + expect(response.dig("data", "organization", "memberRoleName")).to eq( + "Direct Member", + ) + expect(response.dig("data", "organization", "name")).to eq( + "University of Cambridge", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + 
["Cambridge University"], + ) + expect(response.dig("data", "organization", "wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) + expect(response.dig("data", "organization", "twitter")).to eq( + "Cambridge_Uni", + ) + expect(response.dig("data", "organization", "inceptionYear")).to eq(1_209) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => 52.205277777778, + "pointLongitude" => 0.11722222222222, + ) expect(response.dig("data", "organization", "citationCount")).to eq(0) - expect(response.dig("data", "organization", "identifiers").count).to eq(38) - expect(response.dig("data", "organization", "identifiers").first).to eq("identifier" => "10.13039/501100000735", "identifierType" => "fundref") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") + expect(response.dig("data", "organization", "identifiers").count).to eq( + 38, + ) + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => "10.13039/501100000735", "identifierType" => "fundref", + ) + expect(response.dig("data", "organization", "identifiers").last).to eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) - expect(response.dig("data", "organization", "works", "totalCount")).to eq(3) - expect(response.dig("data", "organization", "works", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "organization", "works", "resourceTypes")).to eq([{ "count" => 3, "title" => "Dataset" }]) + expect(response.dig("data", "organization", "works", "totalCount")).to eq( + 3, + ) + expect(response.dig("data", "organization", "works", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) + expect( + response.dig("data", "organization", "works", "resourceTypes"), + ).to eq([{ "count" => 3, "title" => "Dataset" }]) # TODO should be 3 nodes - 
expect(response.dig("data", "organization", "works", "nodes").length).to eq(2) + expect( + response.dig("data", "organization", "works", "nodes").length, + ).to eq(2) work = response.dig("data", "organization", "works", "nodes", 0) - expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(work.dig("titles", 0, "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end describe "find organization by grid_id", elasticsearch: true, vcr: true do let!(:doi) do - create(:doi, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + creators: [ { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }]) + ) end let!(:funder_doi) do - create(:doi, aasm_state: "findable", funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100000735", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "University of Cambridge", - }]) + create( + :doi, + aasm_state: "findable", + funding_references: [ + { + "funderIdentifier" => "https://doi.org/10.13039/501100000735", + "funderIdentifierType" => "Crossref Funder ID", + 
"funderName" => "University of Cambridge", + }, + ], + ) end before do @@ -169,8 +248,8 @@ end let(:query) do - %(query { - organization(gridId: "grid.5335.0") { + "query { + organization(gridId: \"grid.5335.0\") { id name alternateName @@ -211,59 +290,105 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/013meh722") - expect(response.dig("data", "organization", "name")).to eq("University of Cambridge") - expect(response.dig("data", "organization", "alternateName")).to eq(["Cambridge University"]) - expect(response.dig("data", "organization", "wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") - expect(response.dig("data", "organization", "twitter")).to eq("Cambridge_Uni") - expect(response.dig("data", "organization", "inceptionYear")).to eq(1209) - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => 52.205277777778, "pointLongitude" => 0.11722222222222) + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/013meh722", + ) + expect(response.dig("data", "organization", "name")).to eq( + "University of Cambridge", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + ["Cambridge University"], + ) + expect(response.dig("data", "organization", "wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) + expect(response.dig("data", "organization", "twitter")).to eq( + "Cambridge_Uni", + ) + expect(response.dig("data", "organization", "inceptionYear")).to eq(1_209) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => 52.205277777778, + "pointLongitude" => 0.11722222222222, + ) expect(response.dig("data", "organization", "citationCount")).to eq(0) - expect(response.dig("data", "organization", "identifiers").count).to eq(38) - expect(response.dig("data", "organization", 
"identifiers").first).to eq("identifier" => "10.13039/501100000735", "identifierType" => "fundref") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") + expect(response.dig("data", "organization", "identifiers").count).to eq( + 38, + ) + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => "10.13039/501100000735", "identifierType" => "fundref", + ) + expect(response.dig("data", "organization", "identifiers").last).to eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) - expect(response.dig("data", "organization", "works", "totalCount")).to eq(2) - expect(response.dig("data", "organization", "works", "published")).to eq([{ "count" => 2, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "organization", "works", "resourceTypes")).to eq([{ "count" => 2, "title" => "Dataset" }]) - expect(response.dig("data", "organization", "works", "nodes").length).to eq(2) + expect(response.dig("data", "organization", "works", "totalCount")).to eq( + 2, + ) + expect(response.dig("data", "organization", "works", "published")).to eq( + [{ "count" => 2, "id" => "2011", "title" => "2011" }], + ) + expect( + response.dig("data", "organization", "works", "resourceTypes"), + ).to eq([{ "count" => 2, "title" => "Dataset" }]) + expect( + response.dig("data", "organization", "works", "nodes").length, + ).to eq(2) work = response.dig("data", "organization", "works", "nodes", 0) - expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(work.dig("titles", 0, "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end - describe "find organization by crossref_funder_id", elasticsearch: true, vcr: true do + describe "find organization by crossref_funder_id", + elasticsearch: true, vcr: true do let!(:doi) do - create(:doi, aasm_state: "findable", creators: - [{ 
- "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + creators: [ { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }]) + ) end let!(:funder_doi) do - create(:doi, aasm_state: "findable", funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100000735", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "University of Cambridge", - }]) + create( + :doi, + aasm_state: "findable", + funding_references: [ + { + "funderIdentifier" => "https://doi.org/10.13039/501100000735", + "funderIdentifierType" => "Crossref Funder ID", + "funderName" => "University of Cambridge", + }, + ], + ) end before do @@ -272,8 +397,8 @@ end let(:query) do - %(query { - organization(crossrefFunderId: "10.13039/501100000735") { + "query { + organization(crossrefFunderId: \"10.13039/501100000735\") { id name alternateName @@ -314,38 +439,67 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/013meh722") - expect(response.dig("data", "organization", "name")).to eq("University of Cambridge") - 
expect(response.dig("data", "organization", "alternateName")).to eq(["Cambridge University"]) - expect(response.dig("data", "organization", "wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") - expect(response.dig("data", "organization", "twitter")).to eq("Cambridge_Uni") - expect(response.dig("data", "organization", "inceptionYear")).to eq(1209) - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => 52.205277777778, "pointLongitude" => 0.11722222222222) + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/013meh722", + ) + expect(response.dig("data", "organization", "name")).to eq( + "University of Cambridge", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + ["Cambridge University"], + ) + expect(response.dig("data", "organization", "wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) + expect(response.dig("data", "organization", "twitter")).to eq( + "Cambridge_Uni", + ) + expect(response.dig("data", "organization", "inceptionYear")).to eq(1_209) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => 52.205277777778, + "pointLongitude" => 0.11722222222222, + ) expect(response.dig("data", "organization", "citationCount")).to eq(0) - expect(response.dig("data", "organization", "identifiers").count).to eq(38) - expect(response.dig("data", "organization", "identifiers").first).to eq("identifier" => "10.13039/501100000735", "identifierType" => "fundref") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") + expect(response.dig("data", "organization", "identifiers").count).to eq( + 38, + ) + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => "10.13039/501100000735", "identifierType" => "fundref", + ) + expect(response.dig("data", "organization", "identifiers").last).to 
eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) - expect(response.dig("data", "organization", "works", "totalCount")).to eq(2) - expect(response.dig("data", "organization", "works", "published")).to eq([{ "count" => 2, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "organization", "works", "resourceTypes")).to eq([{ "count" => 2, "title" => "Dataset" }]) - expect(response.dig("data", "organization", "works", "nodes").length).to eq(2) + expect(response.dig("data", "organization", "works", "totalCount")).to eq( + 2, + ) + expect(response.dig("data", "organization", "works", "published")).to eq( + [{ "count" => 2, "id" => "2011", "title" => "2011" }], + ) + expect( + response.dig("data", "organization", "works", "resourceTypes"), + ).to eq([{ "count" => 2, "title" => "Dataset" }]) + expect( + response.dig("data", "organization", "works", "nodes").length, + ).to eq(2) work = response.dig("data", "organization", "works", "nodes", 0) - expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(work.dig("titles", 0, "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end describe "find organization no wikidata", elasticsearch: true, vcr: true do let(:query) do - %(query { - organization(id: "https://ror.org/02q0ygf45") { + "query { + organization(id: \"https://ror.org/02q0ygf45\") { id name alternateName @@ -360,29 +514,41 @@ identifierType } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/02q0ygf45") - expect(response.dig("data", "organization", "name")).to eq("OBS Medical (United Kingdom)") - expect(response.dig("data", "organization", "alternateName")).to eq(["Oxford BioSignals"]) + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/02q0ygf45", + ) + expect(response.dig("data", 
"organization", "name")).to eq( + "OBS Medical (United Kingdom)", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + ["Oxford BioSignals"], + ) expect(response.dig("data", "organization", "wikipediaUrl")).to be_nil expect(response.dig("data", "organization", "twitter")).to be_nil expect(response.dig("data", "organization", "inception_year")).to be_nil - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => nil, "pointLongitude" => nil) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => nil, "pointLongitude" => nil, + ) expect(response.dig("data", "organization", "identifiers").count).to eq(2) - expect(response.dig("data", "organization", "identifiers").first).to eq("identifier" => "grid.487335.e", "identifierType" => "grid") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000403987680", "identifierType" => "isni") + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => "grid.487335.e", "identifierType" => "grid", + ) + expect(response.dig("data", "organization", "identifiers").last).to eq( + "identifier" => "0000000403987680", "identifierType" => "isni", + ) end end describe "find organization with people", elasticsearch: true, vcr: true do let(:query) do - %(query { - organization(id: "https://ror.org/013meh722") { + "query { + organization(id: \"https://ror.org/013meh722\") { id name alternateName @@ -412,36 +578,62 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/013meh722") - expect(response.dig("data", "organization", "name")).to eq("University of Cambridge") - expect(response.dig("data", "organization", "alternateName")).to eq(["Cambridge University"]) - expect(response.dig("data", "organization", "country")).to eq("id" => "GB", "name" => "United 
Kingdom") - expect(response.dig("data", "organization", "wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") - expect(response.dig("data", "organization", "twitter")).to eq("Cambridge_Uni") - expect(response.dig("data", "organization", "inceptionYear")).to eq(1209) - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => 52.205277777778, "pointLongitude" => 0.11722222222222) - expect(response.dig("data", "organization", "identifiers").count).to eq(38) - expect(response.dig("data", "organization", "identifiers").first).to eq("identifier" => "10.13039/501100000735", "identifierType" => "fundref") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") - - expect(response.dig("data", "organization", "people", "totalCount")).to eq(14181) - expect(response.dig("data", "organization", "people", "nodes").length).to eq(25) + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/013meh722", + ) + expect(response.dig("data", "organization", "name")).to eq( + "University of Cambridge", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + ["Cambridge University"], + ) + expect(response.dig("data", "organization", "country")).to eq( + "id" => "GB", "name" => "United Kingdom", + ) + expect(response.dig("data", "organization", "wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) + expect(response.dig("data", "organization", "twitter")).to eq( + "Cambridge_Uni", + ) + expect(response.dig("data", "organization", "inceptionYear")).to eq(1_209) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => 52.205277777778, + "pointLongitude" => 0.11722222222222, + ) + expect(response.dig("data", "organization", "identifiers").count).to eq( + 38, + ) + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => 
"10.13039/501100000735", "identifierType" => "fundref", + ) + expect(response.dig("data", "organization", "identifiers").last).to eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) + + expect( + response.dig("data", "organization", "people", "totalCount"), + ).to eq(14_181) + expect( + response.dig("data", "organization", "people", "nodes").length, + ).to eq(25) person = response.dig("data", "organization", "people", "nodes", 0) expect(person.dig("name")).to eq("Michael Edwards") end end - describe "find organization with people query", elasticsearch: true, vcr: true do + describe "find organization with people query", + elasticsearch: true, vcr: true do let(:query) do - %(query { - organization(id: "https://ror.org/013meh722") { + "query { + organization(id: \"https://ror.org/013meh722\") { id name alternateName @@ -460,7 +652,7 @@ identifier identifierType } - people(query: "oxford") { + people(query: \"oxford\") { totalCount nodes { id @@ -471,25 +663,48 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organization", "id")).to eq("https://ror.org/013meh722") - expect(response.dig("data", "organization", "name")).to eq("University of Cambridge") - expect(response.dig("data", "organization", "alternateName")).to eq(["Cambridge University"]) - expect(response.dig("data", "organization", "wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") - expect(response.dig("data", "organization", "twitter")).to eq("Cambridge_Uni") - expect(response.dig("data", "organization", "inceptionYear")).to eq(1209) - expect(response.dig("data", "organization", "geolocation")).to eq("pointLatitude" => 52.205277777778, "pointLongitude" => 0.11722222222222) - expect(response.dig("data", "organization", "identifiers").count).to eq(38) - expect(response.dig("data", "organization", "identifiers").first).to eq("identifier" => "10.13039/501100000735", 
"identifierType" => "fundref") - expect(response.dig("data", "organization", "identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") + expect(response.dig("data", "organization", "id")).to eq( + "https://ror.org/013meh722", + ) + expect(response.dig("data", "organization", "name")).to eq( + "University of Cambridge", + ) + expect(response.dig("data", "organization", "alternateName")).to eq( + ["Cambridge University"], + ) + expect(response.dig("data", "organization", "wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) + expect(response.dig("data", "organization", "twitter")).to eq( + "Cambridge_Uni", + ) + expect(response.dig("data", "organization", "inceptionYear")).to eq(1_209) + expect(response.dig("data", "organization", "geolocation")).to eq( + "pointLatitude" => 52.205277777778, + "pointLongitude" => 0.11722222222222, + ) + expect(response.dig("data", "organization", "identifiers").count).to eq( + 38, + ) + expect(response.dig("data", "organization", "identifiers").first).to eq( + "identifier" => "10.13039/501100000735", "identifierType" => "fundref", + ) + expect(response.dig("data", "organization", "identifiers").last).to eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) - expect(response.dig("data", "organization", "people", "totalCount")).to eq(1988) - expect(response.dig("data", "organization", "people", "nodes").length).to eq(25) + expect( + response.dig("data", "organization", "people", "totalCount"), + ).to eq(1_988) + expect( + response.dig("data", "organization", "people", "nodes").length, + ).to eq(25) person = response.dig("data", "organization", "people", "nodes", 0) expect(person.dig("name")).to eq("Christopher Haley") @@ -498,8 +713,8 @@ describe "find organization not found", elasticsearch: true, vcr: true do let(:query) do - %(query { - organization(id: "https://ror.org/xxxx") { + "query { + organization(id: \"https://ror.org/xxxx\") { id name 
alternateName @@ -529,20 +744,28 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json expect(response.dig("data")).to be_nil - expect(response.dig("errors")).to eq([{ "locations" => [{ "column" => 9, "line" => 2 }], "message" => "Record not found", "path" => ["organization"] }]) + expect(response.dig("errors")).to eq( + [ + { + "locations" => [{ "column" => 9, "line" => 2 }], + "message" => "Record not found", + "path" => %w[organization], + }, + ], + ) end end describe "query all organizations", vcr: true do let(:query) do - %(query { + "query { organizations { totalCount years { @@ -551,36 +774,51 @@ count } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organizations", "totalCount")).to eq(98332) - expect(response.dig("data", "organizations", "years").first).to eq("count" => 80248, "id" => "2017", "title" => "2017") - expect(response.dig("data", "organizations", "years").last).to eq("count" => 513, "id" => "2020", "title" => "2020") + expect(response.dig("data", "organizations", "totalCount")).to eq(98_332) + expect(response.dig("data", "organizations", "years").first).to eq( + "count" => 80_248, "id" => "2017", "title" => "2017", + ) + expect(response.dig("data", "organizations", "years").last).to eq( + "count" => 513, "id" => "2020", "title" => "2020", + ) end end describe "query organizations", elasticsearch: true, vcr: true do let!(:dois) { create_list(:doi, 3) } let!(:doi) do - create(:doi, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + creators: [ { - "name": "University of Cambridge", - 
"affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }]) + ) end before do @@ -589,8 +827,8 @@ end let(:query) do - %(query { - organizations(query: "Cambridge University", after: "MQ") { + "query { + organizations(query: \"Cambridge University\", after: \"MQ\") { totalCount pageInfo { endCursor @@ -631,43 +869,63 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "organizations", "totalCount")).to eq(10790) - expect(response.dig("data", "organizations", "pageInfo", "endCursor")).to eq("Mg") - expect(response.dig("data", "organizations", "pageInfo", "hasNextPage")).to be true + expect(response.dig("data", "organizations", "totalCount")).to eq(10_790) + expect( + response.dig("data", "organizations", "pageInfo", "endCursor"), + ).to eq("Mg") + expect( + response.dig("data", "organizations", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "organizations", "types").length).to eq(8) - expect(response.dig("data", "organizations", "types").first).to eq("count" => 9630, "id" => "education", "title" => "Education") - expect(response.dig("data", "organizations", "countries").length).to eq(10) - expect(response.dig("data", "organizations", "countries").first).to eq("count" => 1771, "id" => "us", "title" => "United States of America") + expect(response.dig("data", "organizations", "types").first).to eq( + "count" => 9_630, "id" => "education", "title" => "Education", + 
) + expect(response.dig("data", "organizations", "countries").length).to eq( + 10, + ) + expect(response.dig("data", "organizations", "countries").first).to eq( + "count" => 1_771, "id" => "us", "title" => "United States of America", + ) expect(response.dig("data", "organizations", "nodes").length).to eq(20) organization = response.dig("data", "organizations", "nodes", 0) expect(organization.fetch("id")).to eq("https://ror.org/013meh722") expect(organization.fetch("name")).to eq("University of Cambridge") - expect(organization.fetch("types")).to eq(["Education"]) - expect(organization.fetch("country")).to eq("id" => "GB", "name" => "United Kingdom") - expect(organization.fetch("alternateName")).to eq(["Cambridge University"]) - expect(organization.fetch("url")).to eq(["http://www.cam.ac.uk/"]) - expect(organization.fetch("wikipediaUrl")).to eq("http://en.wikipedia.org/wiki/University_of_Cambridge") + expect(organization.fetch("types")).to eq(%w[Education]) + expect(organization.fetch("country")).to eq( + "id" => "GB", "name" => "United Kingdom", + ) + expect(organization.fetch("alternateName")).to eq( + ["Cambridge University"], + ) + expect(organization.fetch("url")).to eq(%w[http://www.cam.ac.uk/]) + expect(organization.fetch("wikipediaUrl")).to eq( + "http://en.wikipedia.org/wiki/University_of_Cambridge", + ) expect(organization.fetch("identifiers").length).to eq(38) - expect(organization.fetch("identifiers").last).to eq("identifier" => "0000000121885934", "identifierType" => "isni") + expect(organization.fetch("identifiers").last).to eq( + "identifier" => "0000000121885934", "identifierType" => "isni", + ) expect(organization.dig("works", "totalCount")).to eq(1) - expect(organization.dig("works", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) + expect(organization.dig("works", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) end end describe "query organizations with umlaut", elasticsearch: true, vcr: 
true do let(:query) do - %(query { - organizations(query: "münster") { + "query { + organizations(query: \"münster\") { totalCount pageInfo { endCursor @@ -700,7 +958,7 @@ } } } - }) + }" end it "returns organization information" do @@ -708,29 +966,39 @@ expect(response.dig("data", "organizations", "totalCount")).to eq(10) expect(response.dig("data", "organizations", "types").length).to eq(5) - expect(response.dig("data", "organizations", "types").first).to eq("count" => 4, "id" => "education", "title" => "Education") + expect(response.dig("data", "organizations", "types").first).to eq( + "count" => 4, "id" => "education", "title" => "Education", + ) expect(response.dig("data", "organizations", "countries").length).to eq(1) - expect(response.dig("data", "organizations", "countries").first).to eq("count" => 10, "id" => "de", "title" => "Germany") + expect(response.dig("data", "organizations", "countries").first).to eq( + "count" => 10, "id" => "de", "title" => "Germany", + ) expect(response.dig("data", "organizations", "nodes").length).to eq(10) organization = response.dig("data", "organizations", "nodes", 0) expect(organization.fetch("id")).to eq("https://ror.org/01856cw59") expect(organization.fetch("name")).to eq("University Hospital Münster") - expect(organization.fetch("types")).to eq(["Healthcare"]) - expect(organization.fetch("country")).to eq("id" => "DE", "name" => "Germany") - expect(organization.fetch("alternateName")).to eq(["UKM"]) - expect(organization.fetch("url")).to eq(["http://klinikum.uni-muenster.de/"]) + expect(organization.fetch("types")).to eq(%w[Healthcare]) + expect(organization.fetch("country")).to eq( + "id" => "DE", "name" => "Germany", + ) + expect(organization.fetch("alternateName")).to eq(%w[UKM]) + expect(organization.fetch("url")).to eq( + %w[http://klinikum.uni-muenster.de/], + ) expect(organization.fetch("wikipediaUrl")).to be_nil expect(organization.fetch("identifiers").length).to eq(2) - 
expect(organization.fetch("identifiers").last).to eq("identifier" => "0000000405514246", "identifierType" => "isni") + expect(organization.fetch("identifiers").last).to eq( + "identifier" => "0000000405514246", "identifierType" => "isni", + ) end end describe "query organizations by type", elasticsearch: true, vcr: true do let(:query) do - %(query { - organizations(types: "government", country: "de", after: "MQ") { + "query { + organizations(types: \"government\", country: \"de\", after: \"MQ\") { totalCount pageInfo { endCursor @@ -769,29 +1037,43 @@ } } } - }) + }" end it "returns organization information" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "organizations", "totalCount")).to eq(182) - expect(response.dig("data", "organizations", "pageInfo", "endCursor")).to eq("Mg") - expect(response.dig("data", "organizations", "pageInfo", "hasNextPage")).to be true + expect( + response.dig("data", "organizations", "pageInfo", "endCursor"), + ).to eq("Mg") + expect( + response.dig("data", "organizations", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "organizations", "types").length).to eq(1) - expect(response.dig("data", "organizations", "types").first).to eq("count" => 182, "id" => "government", "title" => "Government") + expect(response.dig("data", "organizations", "types").first).to eq( + "count" => 182, "id" => "government", "title" => "Government", + ) expect(response.dig("data", "organizations", "countries").length).to eq(1) - expect(response.dig("data", "organizations", "countries").first).to eq("count" => 182, "id" => "de", "title" => "Germany") + expect(response.dig("data", "organizations", "countries").first).to eq( + "count" => 182, "id" => "de", "title" => "Germany", + ) expect(response.dig("data", "organizations", "nodes").length).to eq(20) organization = response.dig("data", "organizations", "nodes", 0) expect(organization.fetch("id")).to eq("https://ror.org/04bqwzd17") - 
expect(organization.fetch("name")).to eq("Bayerisches Landesamt für Gesundheit und Lebensmittelsicherheit") - expect(organization.fetch("types")).to eq(["Government"]) - expect(organization.fetch("country")).to eq("id" => "DE", "name" => "Germany") - expect(organization.fetch("alternateName")).to eq(["LGL"]) + expect(organization.fetch("name")).to eq( + "Bayerisches Landesamt für Gesundheit und Lebensmittelsicherheit", + ) + expect(organization.fetch("types")).to eq(%w[Government]) + expect(organization.fetch("country")).to eq( + "id" => "DE", "name" => "Germany", + ) + expect(organization.fetch("alternateName")).to eq(%w[LGL]) expect(organization.fetch("identifiers").length).to eq(2) - expect(organization.fetch("identifiers").first).to eq("identifier" => "grid.414279.d", "identifierType" => "grid") + expect(organization.fetch("identifiers").first).to eq( + "identifier" => "grid.414279.d", "identifierType" => "grid", + ) expect(organization.dig("works", "totalCount")).to eq(0) end diff --git a/spec/graphql/types/peer_review_type_spec.rb b/spec/graphql/types/peer_review_type_spec.rb index ec22eecdb..c9c865262 100644 --- a/spec/graphql/types/peer_review_type_spec.rb +++ b/spec/graphql/types/peer_review_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PeerReviewType do @@ -9,7 +11,16 @@ end describe "query peer reviews", elasticsearch: true do - let!(:peer_reviews) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" }, aasm_state: "findable") } + let!(:peer_reviews) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" + }, + aasm_state: "findable", + ) + end before do Doi.import @@ -18,14 +29,14 @@ end let(:query) do - %(query { + "query { peerReviews { totalCount nodes { id } } - }) + }" end it "returns all peer reviews" do @@ -38,16 +49,39 @@ end describe "query peer reviews by person", elasticsearch: true do - 
let!(:peer_reviews) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" }, aasm_state: "findable") } + let!(:peer_reviews) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" + }, + aasm_state: "findable", + ) + end let!(:peer_review) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" }, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "\"Peer review\"" + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -56,8 +90,8 @@ end let(:query) do - %(query { - peerReviews(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + peerReviews(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -68,14 +102,16 @@ id } } - }) + }" end it "returns peer reviews" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "peerReviews", "totalCount")).to eq(3) - expect(response.dig("data", "peerReviews", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "peerReviews", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "peerReviews", "nodes").length).to eq(3) # expect(response.dig("data", 
"peerReviews", "nodes", 0, "id")).to eq(@dois.first.identifier) end diff --git a/spec/graphql/types/person_type_spec.rb b/spec/graphql/types/person_type_spec.rb index 6466573a5..91cf8d451 100644 --- a/spec/graphql/types/person_type_spec.rb +++ b/spec/graphql/types/person_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PersonType do @@ -18,28 +20,66 @@ it { is_expected.to have_field(:citationCount).of_type("Int") } it { is_expected.to have_field(:viewCount).of_type("Int") } it { is_expected.to have_field(:downloadCount).of_type("Int") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal") } + it do + is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") + end + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal", + ) + end it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal") } end describe "find person", elasticsearch: true, vcr: true do let(:client) { create(:client) } let(:doi) do - create(:doi, client: client, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + client: client, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => 
"https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:citation_event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } - let!(:citation_event2) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi2.doi}", relation_type_id: "is-referenced-by", occurred_at: "2016-06-13T16:14:19Z") } + let!(:citation_event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:citation_event2) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi2.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2016-06-13T16:14:19Z", + ) + end before do Client.import @@ -49,8 +89,8 @@ end let(:query) do - %(query { - person(id: "https://orcid.org/0000-0003-3484-6875") { + "query { + person(id: \"https://orcid.org/0000-0003-3484-6875\") { id name givenName @@ -101,40 +141,78 @@ } } } - }) + }" end it "returns person information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "person", "id")).to eq("https://orcid.org/0000-0003-3484-6875") + expect(response.dig("data", "person", "id")).to eq( + "https://orcid.org/0000-0003-3484-6875", + ) expect(response.dig("data", "person", "name")).to eq("K. J. 
Garza") expect(response.dig("data", "person", "givenName")).to eq("Kristian") expect(response.dig("data", "person", "familyName")).to eq("Garza") - expect(response.dig("data", "person", "alternateName")).to eq(["Kristian Javier Garza Gutierrez"]) + expect(response.dig("data", "person", "alternateName")).to eq( + ["Kristian Javier Garza Gutierrez"], + ) expect(response.dig("data", "person", "description")).to be_nil - expect(response.dig("data", "person", "links")).to eq([{ "name" => "Mendeley profile", "url" => "https://www.mendeley.com/profiles/kristian-g/" }, { "name" => "github", "url" => "https://github.com/kjgarza" }]) - expect(response.dig("data", "person", "identifiers")).to eq([{ "identifier" => "kjgarza", "identifierType" => "GitHub", "identifierUrl" => "https://github.com/kjgarza" }]) - expect(response.dig("data", "person", "country")).to eq("id" => "DE", "name" => "Germany") - expect(response.dig("data", "person", "employment")).to eq([{ "endDate" => nil, "organizationId" => nil, "organizationName" => "DataCite", "roleTitle" => "Application Developer", "startDate" => "2016-08-01T00:00:00Z" }]) + expect(response.dig("data", "person", "links")).to eq( + [ + { + "name" => "Mendeley profile", + "url" => "https://www.mendeley.com/profiles/kristian-g/", + }, + { "name" => "github", "url" => "https://github.com/kjgarza" }, + ], + ) + expect(response.dig("data", "person", "identifiers")).to eq( + [ + { + "identifier" => "kjgarza", + "identifierType" => "GitHub", + "identifierUrl" => "https://github.com/kjgarza", + }, + ], + ) + expect(response.dig("data", "person", "country")).to eq( + "id" => "DE", "name" => "Germany", + ) + expect(response.dig("data", "person", "employment")).to eq( + [ + { + "endDate" => nil, + "organizationId" => nil, + "organizationName" => "DataCite", + "roleTitle" => "Application Developer", + "startDate" => "2016-08-01T00:00:00Z", + }, + ], + ) expect(response.dig("data", "person", "citationCount")).to eq(2) expect(response.dig("data", 
"person", "viewCount")).to eq(0) expect(response.dig("data", "person", "downloadCount")).to eq(0) expect(response.dig("data", "person", "works", "totalCount")).to eq(1) - expect(response.dig("data", "person", "works", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "person", "works", "resourceTypes")).to eq([{ "count" => 1, "id" => "dataset", "title" => "Dataset" }]) + expect(response.dig("data", "person", "works", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) + expect(response.dig("data", "person", "works", "resourceTypes")).to eq( + [{ "count" => 1, "id" => "dataset", "title" => "Dataset" }], + ) expect(response.dig("data", "person", "works", "nodes").length).to eq(1) work = response.dig("data", "person", "works", "nodes", 0) - expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(work.dig("titles", 0, "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) expect(work.dig("citationCount")).to eq(2) end end describe "find person with employment", elasticsearch: true, vcr: true do let(:query) do - %(query { - person(id: "https://orcid.org/0000-0003-1419-2405") { + "query { + person(id: \"https://orcid.org/0000-0003-1419-2405\") { id name givenName @@ -162,46 +240,90 @@ endDate } } - }) + }" end it "returns person information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "person", "id")).to eq("https://orcid.org/0000-0003-1419-2405") + expect(response.dig("data", "person", "id")).to eq( + "https://orcid.org/0000-0003-1419-2405", + ) expect(response.dig("data", "person", "name")).to eq("Martin Fenner") expect(response.dig("data", "person", "givenName")).to eq("Martin") expect(response.dig("data", "person", "familyName")).to eq("Fenner") - expect(response.dig("data", "person", "alternateName")).to eq(["Martin Hellmut Fenner"]) - expect(response.dig("data", "person", 
"description")).to eq("Martin Fenner is the DataCite Technical Director since 2015. From 2012 to 2015 he was the technical lead for the PLOS Article-Level Metrics project. Martin has a medical degree from the Free University of Berlin and is a Board-certified medical oncologist.") - expect(response.dig("data", "person", "links")).to eq([{ "name" => "Twitter", "url" => "http://twitter.com/mfenner" }]) - expect(response.dig("data", "person", "identifiers")).to eq([{ "identifier" => "7006600825", - "identifierType" => "Scopus Author ID", - "identifierUrl" => - "http://www.scopus.com/inward/authorDetails.url?authorID=7006600825&partnerID=MN8TOARS" }, - { "identifier" => "000000035060549X", - "identifierType" => "ISNI", - "identifierUrl" => "http://isni.org/000000035060549X" }, - { "identifier" => "mfenner", - "identifierType" => "GitHub", - "identifierUrl" => "https://github.com/mfenner" }]) - expect(response.dig("data", "person", "country")).to eq("id" => "DE", "name" => "Germany") - expect(response.dig("data", "person", "employment")).to eq([ - { "organizationId" => "https://grid.ac/institutes/grid.475826.a", "organizationName" => "DataCite", "roleTitle" => "Technical Director", "startDate" => "2015-08-01T00:00:00Z", "endDate" => nil }, - { "organizationId" => "https://grid.ac/institutes/grid.10423.34", - "organizationName" => "Hannover Medical School", - "roleTitle" => "Clinical Fellow in Hematology and Oncology", "startDate" => "2005-11-01T00:00:00Z", "endDate" => "2017-05-01T00:00:00Z" }, - { "organizationId" => nil, "organizationName" => "Public Library of Science", "roleTitle" => "Technical lead article-level metrics project (contractor)", "startDate" => "2012-04-01T00:00:00Z", "endDate" => "2015-07-01T00:00:00Z" }, - { "organizationId" => nil, "organizationName" => "Charité Universitätsmedizin Berlin", - "roleTitle" => "Resident in Internal Medicine", "startDate" => "1998-09-01T00:00:00Z", "endDate" => "2005-10-01T00:00:00Z" }, - ]) + 
expect(response.dig("data", "person", "alternateName")).to eq( + ["Martin Hellmut Fenner"], + ) + expect(response.dig("data", "person", "description")).to eq( + "Martin Fenner is the DataCite Technical Director since 2015. From 2012 to 2015 he was the technical lead for the PLOS Article-Level Metrics project. Martin has a medical degree from the Free University of Berlin and is a Board-certified medical oncologist.", + ) + expect(response.dig("data", "person", "links")).to eq( + [{ "name" => "Twitter", "url" => "http://twitter.com/mfenner" }], + ) + expect(response.dig("data", "person", "identifiers")).to eq( + [ + { + "identifier" => "7006600825", + "identifierType" => "Scopus Author ID", + "identifierUrl" => + "http://www.scopus.com/inward/authorDetails.url?authorID=7006600825&partnerID=MN8TOARS", + }, + { + "identifier" => "000000035060549X", + "identifierType" => "ISNI", + "identifierUrl" => "http://isni.org/000000035060549X", + }, + { + "identifier" => "mfenner", + "identifierType" => "GitHub", + "identifierUrl" => "https://github.com/mfenner", + }, + ], + ) + expect(response.dig("data", "person", "country")).to eq( + "id" => "DE", "name" => "Germany", + ) + expect(response.dig("data", "person", "employment")).to eq( + [ + { + "organizationId" => "https://grid.ac/institutes/grid.475826.a", + "organizationName" => "DataCite", + "roleTitle" => "Technical Director", + "startDate" => "2015-08-01T00:00:00Z", + "endDate" => nil, + }, + { + "organizationId" => "https://grid.ac/institutes/grid.10423.34", + "organizationName" => "Hannover Medical School", + "roleTitle" => "Clinical Fellow in Hematology and Oncology", + "startDate" => "2005-11-01T00:00:00Z", + "endDate" => "2017-05-01T00:00:00Z", + }, + { + "organizationId" => nil, + "organizationName" => "Public Library of Science", + "roleTitle" => + "Technical lead article-level metrics project (contractor)", + "startDate" => "2012-04-01T00:00:00Z", + "endDate" => "2015-07-01T00:00:00Z", + }, + { + "organizationId" 
=> nil, + "organizationName" => "Charité Universitätsmedizin Berlin", + "roleTitle" => "Resident in Internal Medicine", + "startDate" => "1998-09-01T00:00:00Z", + "endDate" => "2005-10-01T00:00:00Z", + }, + ], + ) end end describe "find person not found", elasticsearch: true, vcr: true do let(:query) do - %(query { - person(id: "https://orcid.org/xxxx") { + "query { + person(id: \"https://orcid.org/xxxx\") { id name givenName @@ -245,21 +367,29 @@ } } } - }) + }" end it "returns error" do response = LupoSchema.execute(query).as_json expect(response.dig("data")).to be_nil - expect(response.dig("errors")).to eq([{ "locations" => [{ "column" => 9, "line" => 2 }], "message" => "Record not found", "path" => ["person"] }]) + expect(response.dig("errors")).to eq( + [ + { + "locations" => [{ "column" => 9, "line" => 2 }], + "message" => "Record not found", + "path" => %w[person], + }, + ], + ) end end describe "find person account locked", elasticsearch: true, vcr: true do let(:query) do - %(query { - person(id: "https://orcid.org/0000-0003-1315-5960") { + "query { + person(id: \"https://orcid.org/0000-0003-1315-5960\") { id name givenName @@ -303,20 +433,29 @@ } } } - }) + }" end it "returns error" do response = LupoSchema.execute(query).as_json expect(response.dig("data")).to be_nil - expect(response.dig("errors")).to eq([{ "locations" => [{ "column" => 9, "line" => 2 }], "message" => "409 Conflict: The ORCID record is locked and cannot be edited. ORCID https://orcid.org/0000-0003-1315-5960", "path" => ["person"] }]) + expect(response.dig("errors")).to eq( + [ + { + "locations" => [{ "column" => 9, "line" => 2 }], + "message" => + "409 Conflict: The ORCID record is locked and cannot be edited. 
ORCID https://orcid.org/0000-0003-1315-5960", + "path" => %w[person], + }, + ], + ) end end describe "query all people", elasticsearch: true, vcr: true do let(:query) do - %(query { + "query { people { totalCount years { @@ -325,22 +464,26 @@ count } } - }) + }" end it "returns people information" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "people", "totalCount")).to eq(9688620) - expect(response.dig("data", "people", "years").first).to eq("count" => 44270, "id" => "2012", "title" => "2012") - expect(response.dig("data", "people", "years").last).to eq("count" => 1767011, "id" => "2020", "title" => "2020") + expect(response.dig("data", "people", "totalCount")).to eq(9_688_620) + expect(response.dig("data", "people", "years").first).to eq( + "count" => 44_270, "id" => "2012", "title" => "2012", + ) + expect(response.dig("data", "people", "years").last).to eq( + "count" => 1_767_011, "id" => "2020", "title" => "2020", + ) end end describe "query people", elasticsearch: true, vcr: true do let(:query) do - %(query { - people(query: "Fenner", first: 50, after: "NA") { + "query { + people(query: \"Fenner\", first: 50, after: \"NA\") { totalCount pageInfo { endCursor @@ -362,14 +505,16 @@ } } } - }) + }" end it "returns people information" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "people", "totalCount")).to eq(262) - expect(response.dig("data", "people", "pageInfo", "endCursor")).to eq("NQ") + expect(response.dig("data", "people", "pageInfo", "endCursor")).to eq( + "NQ", + ) # expect(response.dig("data", "people", "pageInfo", "hasNextPage")).to be true expect(response.dig("data", "people", "nodes").length).to eq(50) @@ -384,8 +529,8 @@ describe "query people with error", elasticsearch: true, vcr: true do let(:query) do - %(query { - people(query: "container.identifier:2658-719X") { + "query { + people(query: \"container.identifier:2658-719X\") { totalCount pageInfo { endCursor @@ -407,14 +552,16 @@ } } } 
- }) + }" end it "returns error" do response = LupoSchema.execute(query).as_json expect(response.dig("data")).to be_nil - expect(response.dig("errors", 0, "message")).to start_with("org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException") + expect(response.dig("errors", 0, "message")).to start_with( + "org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException", + ) end end end diff --git a/spec/graphql/types/physical_object_type_spec.rb b/spec/graphql/types/physical_object_type_spec.rb index 3a65c7334..19c0f6db8 100644 --- a/spec/graphql/types/physical_object_type_spec.rb +++ b/spec/graphql/types/physical_object_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PhysicalObjectType do diff --git a/spec/graphql/types/prefix_type_spec.rb b/spec/graphql/types/prefix_type_spec.rb index feeb63f74..df69d0043 100644 --- a/spec/graphql/types/prefix_type_spec.rb +++ b/spec/graphql/types/prefix_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PrefixType do diff --git a/spec/graphql/types/preprint_type_spec.rb b/spec/graphql/types/preprint_type_spec.rb index 9caa1cf6f..77cfdce43 100644 --- a/spec/graphql/types/preprint_type_spec.rb +++ b/spec/graphql/types/preprint_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PreprintType do @@ -9,8 +11,28 @@ end describe "query preprints", elasticsearch: true do - let!(:preprints) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "Preprint" }, agency: "datacite", aasm_state: "findable") } - let!(:posted_contents) { create_list(:doi, 2, types: { "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" }, agency: "crossref", aasm_state: "findable") } + let!(:preprints) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "Preprint" + }, + agency: "datacite", + aasm_state: "findable", + ) + end + 
let!(:posted_contents) do + create_list( + :doi, + 2, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" + }, + agency: "crossref", + aasm_state: "findable", + ) + end before do Doi.import @@ -19,7 +41,7 @@ end let(:query) do - %(query { + "query { preprints { totalCount registrationAgencies { @@ -36,33 +58,64 @@ } } } - }) + }" end it "returns all preprints" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "preprints", "totalCount")).to eq(4) - expect(response.dig("data", "preprints", "registrationAgencies")).to eq([{ "count" => 2, "id" => "crossref", "title" => "Crossref" }, - { "count" => 2, "id" => "datacite", "title" => "DataCite" }]) + expect(response.dig("data", "preprints", "registrationAgencies")).to eq( + [ + { "count" => 2, "id" => "crossref", "title" => "Crossref" }, + { "count" => 2, "id" => "datacite", "title" => "DataCite" }, + ], + ) expect(response.dig("data", "preprints", "nodes").length).to eq(4) - expect(response.dig("data", "preprints", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "preprints", "nodes", 0, "type")).to eq("Preprint") + expect(response.dig("data", "preprints", "nodes", 0, "id")).to eq( + @dois.first.identifier, + ) + expect(response.dig("data", "preprints", "nodes", 0, "type")).to eq( + "Preprint", + ) # expect(response.dig("data", "preprints", "nodes", 0, "registrationAgency")).to eq("id"=>"datacite", "name"=>"DataCite") end end describe "query preprints by person", elasticsearch: true do - let!(:preprints) { create_list(:doi, 3, types: { "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" }, aasm_state: "findable") } + let!(:preprints) do + create_list( + :doi, + 3, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" + }, + aasm_state: "findable", + ) + end let!(:preprint) do - create(:doi, types: { "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" }, aasm_state: "findable", 
creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + types: { + "resourceTypeGeneral" => "Text", "resourceType" => "PostedContent" + }, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end before do Doi.import @@ -71,8 +124,8 @@ end let(:query) do - %(query { - preprints(userId: "https://orcid.org/0000-0003-1419-2405") { + "query { + preprints(userId: \"https://orcid.org/0000-0003-1419-2405\") { totalCount published { id @@ -83,14 +136,16 @@ id } } - }) + }" end it "returns preprints" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "preprints", "totalCount")).to eq(3) - expect(response.dig("data", "preprints", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "preprints", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "preprints", "nodes").length).to eq(3) # expect(response.dig("data", "preprints", "nodes", 0, "id")).to eq(@dois.first.identifier) end diff --git a/spec/graphql/types/publication_type_spec.rb b/spec/graphql/types/publication_type_spec.rb index 7fe2f2d64..9d5dee241 100644 --- a/spec/graphql/types/publication_type_spec.rb +++ b/spec/graphql/types/publication_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PublicationType do diff --git a/spec/graphql/types/query_type_spec.rb 
b/spec/graphql/types/query_type_spec.rb index 7f38e4579..9bb5cabc0 100644 --- a/spec/graphql/types/query_type_spec.rb +++ b/spec/graphql/types/query_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe QueryType do @@ -7,52 +9,132 @@ it { is_expected.to have_field(:work).of_type("Work!") } it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal!") } it { is_expected.to have_field(:dataset).of_type("Dataset!") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal!") } + it do + is_expected.to have_field(:datasets).of_type( + "DatasetConnectionWithTotal!", + ) + end it { is_expected.to have_field(:publication).of_type("Publication!") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal!") } + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal!", + ) + end it { is_expected.to have_field(:software).of_type("Software!") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal!") } + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal!", + ) + end it { is_expected.to have_field(:service).of_type("Service!") } - it { is_expected.to have_field(:services).of_type("ServiceConnectionWithTotal!") } + it do + is_expected.to have_field(:services).of_type( + "ServiceConnectionWithTotal!", + ) + end it { is_expected.to have_field(:audiovisual).of_type("Audiovisual!") } - it { is_expected.to have_field(:audiovisuals).of_type("AudiovisualConnectionWithTotal!") } + it do + is_expected.to have_field(:audiovisuals).of_type( + "AudiovisualConnectionWithTotal!", + ) + end it { is_expected.to have_field(:collection).of_type("Collection!") } - it { is_expected.to have_field(:collections).of_type("CollectionConnectionWithTotal!") } + it do + is_expected.to have_field(:collections).of_type( + "CollectionConnectionWithTotal!", + ) + end it { is_expected.to 
have_field(:data_paper).of_type("DataPaper!") } - it { is_expected.to have_field(:data_papers).of_type("DataPaperConnectionWithTotal!") } + it do + is_expected.to have_field(:data_papers).of_type( + "DataPaperConnectionWithTotal!", + ) + end it { is_expected.to have_field(:image).of_type("Image!") } - it { is_expected.to have_field(:images).of_type("ImageConnectionWithTotal!") } - it { is_expected.to have_field(:interactive_resource).of_type("InteractiveResource!") } - it { is_expected.to have_field(:interactive_resources).of_type("InteractiveResourceConnectionWithTotal!") } + it do + is_expected.to have_field(:images).of_type("ImageConnectionWithTotal!") + end + it do + is_expected.to have_field(:interactive_resource).of_type( + "InteractiveResource!", + ) + end + it do + is_expected.to have_field(:interactive_resources).of_type( + "InteractiveResourceConnectionWithTotal!", + ) + end it { is_expected.to have_field(:event).of_type("Event!") } - it { is_expected.to have_field(:events).of_type("EventConnectionWithTotal!") } + it do + is_expected.to have_field(:events).of_type("EventConnectionWithTotal!") + end it { is_expected.to have_field(:model).of_type("Model!") } - it { is_expected.to have_field(:models).of_type("ModelConnectionWithTotal!") } - it { is_expected.to have_field(:physical_object).of_type("PhysicalObject!") } - it { is_expected.to have_field(:physical_objects).of_type("PhysicalObjectConnectionWithTotal!") } + it do + is_expected.to have_field(:models).of_type("ModelConnectionWithTotal!") + end + it do + is_expected.to have_field(:physical_object).of_type("PhysicalObject!") + end + it do + is_expected.to have_field(:physical_objects).of_type( + "PhysicalObjectConnectionWithTotal!", + ) + end it { is_expected.to have_field(:sound).of_type("Sound!") } - it { is_expected.to have_field(:sounds).of_type("SoundConnectionWithTotal!") } + it do + is_expected.to have_field(:sounds).of_type("SoundConnectionWithTotal!") + end it { is_expected.to 
have_field(:workflow).of_type("Workflow!") } - it { is_expected.to have_field(:workflows).of_type("WorkflowConnectionWithTotal!") } + it do + is_expected.to have_field(:workflows).of_type( + "WorkflowConnectionWithTotal!", + ) + end it { is_expected.to have_field(:other).of_type("Other!") } - it { is_expected.to have_field(:others).of_type("OtherConnectionWithTotal!") } + it do + is_expected.to have_field(:others).of_type("OtherConnectionWithTotal!") + end it { is_expected.to have_field(:member).of_type("Member!") } - it { is_expected.to have_field(:members).of_type("MemberConnectionWithTotal!") } + it do + is_expected.to have_field(:members).of_type("MemberConnectionWithTotal!") + end it { is_expected.to have_field(:repository).of_type("Repository!") } - it { is_expected.to have_field(:repositories).of_type("RepositoryConnectionWithTotal!") } + it do + is_expected.to have_field(:repositories).of_type( + "RepositoryConnectionWithTotal!", + ) + end it { is_expected.to have_field(:prefix).of_type("Prefix!") } - it { is_expected.to have_field(:prefixes).of_type("PrefixConnectionWithTotal!") } + it do + is_expected.to have_field(:prefixes).of_type("PrefixConnectionWithTotal!") + end it { is_expected.to have_field(:usage_report).of_type("UsageReport!") } - it { is_expected.to have_field(:usage_reports).of_type("UsageReportConnectionWithTotal!") } + it do + is_expected.to have_field(:usage_reports).of_type( + "UsageReportConnectionWithTotal!", + ) + end it { is_expected.to have_field(:funder).of_type("Funder!") } - it { is_expected.to have_field(:funders).of_type("FunderConnectionWithTotal!") } + it do + is_expected.to have_field(:funders).of_type("FunderConnectionWithTotal!") + end it { is_expected.to have_field(:data_catalog).of_type("DataCatalog!") } - it { is_expected.to have_field(:data_catalogs).of_type("DataCatalogConnectionWithTotal!") } + it do + is_expected.to have_field(:data_catalogs).of_type( + "DataCatalogConnectionWithTotal!", + ) + end it { is_expected.to 
have_field(:organization).of_type("Organization!") } - it { is_expected.to have_field(:organizations).of_type("OrganizationConnectionWithTotal!") } + it do + is_expected.to have_field(:organizations).of_type( + "OrganizationConnectionWithTotal!", + ) + end it { is_expected.to have_field(:person).of_type("Person!") } - it { is_expected.to have_field(:people).of_type("PersonConnectionWithTotal!") } + it do + is_expected.to have_field(:people).of_type("PersonConnectionWithTotal!") + end end end diff --git a/spec/graphql/types/registration_agency_type_spec.rb b/spec/graphql/types/registration_agency_type_spec.rb index c15935c8d..56a3428d0 100644 --- a/spec/graphql/types/registration_agency_type_spec.rb +++ b/spec/graphql/types/registration_agency_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe RegistrationAgencyType do diff --git a/spec/graphql/types/repository_prefix_type_spec.rb b/spec/graphql/types/repository_prefix_type_spec.rb index 91b7601f4..acec7d5fc 100644 --- a/spec/graphql/types/repository_prefix_type_spec.rb +++ b/spec/graphql/types/repository_prefix_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe RepositoryPrefixType do diff --git a/spec/graphql/types/repository_type_spec.rb b/spec/graphql/types/repository_type_spec.rb index ac1d18157..44a1613e7 100644 --- a/spec/graphql/types/repository_type_spec.rb +++ b/spec/graphql/types/repository_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe RepositoryType do @@ -18,22 +20,34 @@ it { is_expected.to have_field(:language).of_type("[String!]") } it { is_expected.to have_field(:issn).of_type("Issn") } - it { is_expected.to have_field(:datasets).of_type("DatasetConnectionWithTotal") } - it { is_expected.to have_field(:publications).of_type("PublicationConnectionWithTotal") } - it { is_expected.to have_field(:softwares).of_type("SoftwareConnectionWithTotal") } + it do + is_expected.to 
have_field(:datasets).of_type("DatasetConnectionWithTotal") + end + it do + is_expected.to have_field(:publications).of_type( + "PublicationConnectionWithTotal", + ) + end + it do + is_expected.to have_field(:softwares).of_type( + "SoftwareConnectionWithTotal", + ) + end it { is_expected.to have_field(:works).of_type("WorkConnectionWithTotal") } end describe "query repositories", elasticsearch: true do let!(:clients) { create_list(:client, 3, software: "Dataverse") } - let!(:client) { create(:client, software: "Dataverse", re3data_id: "10.17616/R3XS37") } + let!(:client) do + create(:client, software: "Dataverse", re3data_id: "10.17616/R3XS37") + end before do Client.import sleep 2 end let(:query) do - %(query { + "query { repositories(first: 10) { totalCount pageInfo { @@ -76,19 +90,27 @@ re3dataId } } - }) + }" end it "returns repositories" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "repositories", "totalCount")).to eq(4) - expect(response.dig("data", "repositories", "years")).to eq([{ "count" => 4, "id" => "2020", "title" => "2020" }]) - expect(response.dig("data", "repositories", "members")).to eq([{ "count" => 1, "title" => "My provider" }, - { "count" => 1, "title" => "My provider" }, - { "count" => 1, "title" => "My provider" }, - { "count" => 1, "title" => "My provider" }]) - expect(response.dig("data", "repositories", "software")).to eq([{ "count" => 4, "id" => "dataverse", "title" => "Dataverse" }]) + expect(response.dig("data", "repositories", "years")).to eq( + [{ "count" => 4, "id" => "2020", "title" => "2020" }], + ) + expect(response.dig("data", "repositories", "members")).to eq( + [ + { "count" => 1, "title" => "My provider" }, + { "count" => 1, "title" => "My provider" }, + { "count" => 1, "title" => "My provider" }, + { "count" => 1, "title" => "My provider" }, + ], + ) + expect(response.dig("data", "repositories", "software")).to eq( + [{ "count" => 4, "id" => "dataverse", "title" => "Dataverse" }], + ) 
expect(response.dig("data", "repositories", "certificates")).to be_empty # expect(response.dig("data", "repositories", "clientTypes")).to eq([{"count"=>4, "id"=>"repository", "title"=>"Repository"}]) # expect(response.dig("data", "repositories", "repositoryTypes")).to be_empty @@ -104,7 +126,12 @@ describe "find repository", elasticsearch: true do let(:provider) { create(:provider, symbol: "TESTC") } - let(:client) { create(:client, symbol: "TESTC.TESTC", alternate_name: "ABC", provider: provider) } + let(:client) do + create( + :client, + symbol: "TESTC.TESTC", alternate_name: "ABC", provider: provider, + ) + end let!(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:prefix) { create(:prefix) } let!(:client_prefixes) { create_list(:client_prefix, 3, client: client) } @@ -119,8 +146,8 @@ end let(:query) do - %(query { - repository(id: "testc.testc") { + "query { + repository(id: \"testc.testc\") { id name alternateName @@ -138,7 +165,7 @@ } } } - }) + }" end it "returns repository" do @@ -148,11 +175,19 @@ expect(response.dig("data", "repository", "name")).to eq("My data center") expect(response.dig("data", "repository", "alternateName")).to eq("ABC") - expect(response.dig("data", "repository", "datasets", "totalCount")).to eq(1) + expect( + response.dig("data", "repository", "datasets", "totalCount"), + ).to eq(1) - expect(response.dig("data", "repository", "prefixes", "totalCount")).to eq(3) - expect(response.dig("data", "repository", "prefixes", "years")).to eq([{ "count" => 3, "id" => "2020" }]) - expect(response.dig("data", "repository", "prefixes", "nodes").length).to eq(3) + expect( + response.dig("data", "repository", "prefixes", "totalCount"), + ).to eq(3) + expect(response.dig("data", "repository", "prefixes", "years")).to eq( + [{ "count" => 3, "id" => "2020" }], + ) + expect( + response.dig("data", "repository", "prefixes", "nodes").length, + ).to eq(3) prefix1 = response.dig("data", "repository", "prefixes", "nodes", 0) 
expect(prefix1.fetch("name")).to eq(client_prefixes.first.prefix_id) end @@ -162,19 +197,47 @@ let(:provider) { create(:provider, symbol: "TESTR") } let(:client) { create(:client, symbol: "TESTR.TESTR", provider: provider) } let(:doi) do - create(:doi, client: client, aasm_state: "findable", creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) + create( + :doi, + client: client, + aasm_state: "findable", + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi2) { create(:doi, client: client, aasm_state: "findable") } - let!(:citation_event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by", occurred_at: "2015-06-13T16:14:19Z") } - let!(:citation_event2) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi2.doi}", relation_type_id: "is-referenced-by", occurred_at: "2016-06-13T16:14:19Z") } + let!(:citation_event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + occurred_at: "2015-06-13T16:14:19Z", + ) + end + let!(:citation_event2) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi2.doi}", + 
relation_type_id: "is-referenced-by", + occurred_at: "2016-06-13T16:14:19Z", + ) + end before do Provider.import @@ -185,8 +248,8 @@ end let(:query) do - %(query { - repository(id: "testr.testr") { + "query { + repository(id: \"testr.testr\") { id name citationCount @@ -211,7 +274,7 @@ } } } - }) + }" end it "returns repository information" do @@ -221,9 +284,15 @@ expect(response.dig("data", "repository", "name")).to eq("My data center") expect(response.dig("data", "repository", "citationCount")).to eq(2) expect(response.dig("data", "repository", "works", "totalCount")).to eq(3) - expect(response.dig("data", "repository", "works", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) - expect(response.dig("data", "repository", "works", "resourceTypes")).to eq([{ "count" => 3, "id" => "dataset", "title" => "Dataset" }]) - expect(response.dig("data", "repository", "works", "nodes").length).to eq(3) + expect(response.dig("data", "repository", "works", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) + expect( + response.dig("data", "repository", "works", "resourceTypes"), + ).to eq([{ "count" => 3, "id" => "dataset", "title" => "Dataset" }]) + expect(response.dig("data", "repository", "works", "nodes").length).to eq( + 3, + ) # work = response.dig("data", "repository", "works", "nodes", 0) # expect(work.dig("titles", 0, "title")).to eq("Data from: A new malaria agent in African hominids.") diff --git a/spec/graphql/types/service_type_spec.rb b/spec/graphql/types/service_type_spec.rb index 880282edd..1368ac655 100644 --- a/spec/graphql/types/service_type_spec.rb +++ b/spec/graphql/types/service_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ServiceType do @@ -10,27 +12,35 @@ describe "query services", elasticsearch: true do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, symbol: "DATACITE.SERVICES", provider: provider) } + 
let(:client) do + create(:client, symbol: "DATACITE.SERVICES", provider: provider) + end let!(:services) do - create_list(:doi, 3, aasm_state: "findable", client: client, - types: { "resourceTypeGeneral" => "Service" }, titles: [{ "title" => "Test Service" }], subjects: - [{ - "subject": "FOS: Computer and information sciences", - "schemeUri": "http://www.oecd.org/science/inno/38235147.pdf", - "subjectScheme": "Fields of Science and Technology (FOS)", - }, - { - "subject": "Instrument", - "subjectScheme": "PidEntity", - }], - geo_locations: - [{ - "geoLocationPoint" => { - "pointLatitude" => "49.0850736", - "pointLongitude" => "-123.3300992", - }, - "geoLocationPlace" => "Munich, Germany", - }]) + create_list( + :doi, + 3, + aasm_state: "findable", + client: client, + types: { "resourceTypeGeneral" => "Service" }, + titles: [{ "title" => "Test Service" }], + subjects: [ + { + "subject": "FOS: Computer and information sciences", + "schemeUri": "http://www.oecd.org/science/inno/38235147.pdf", + "subjectScheme": "Fields of Science and Technology (FOS)", + }, + { "subject": "Instrument", "subjectScheme": "PidEntity" }, + ], + geo_locations: [ + { + "geoLocationPoint" => { + "pointLatitude" => "49.0850736", + "pointLongitude" => "-123.3300992", + }, + "geoLocationPlace" => "Munich, Germany", + }, + ], + ) end before do @@ -42,8 +52,8 @@ end let(:query) do - %(query { - services(pidEntity: "instrument") { + "query { + services(pidEntity: \"instrument\") { totalCount pageInfo { endCursor @@ -94,33 +104,65 @@ } } } - }) + }" end it "returns services" do response = LupoSchema.execute(query).as_json expect(response.dig("data", "services", "totalCount")).to eq(3) - expect(response.dig("data", "services", "pidEntities")).to eq([{ "count" => 3, "id" => "instrument", "title" => "Instrument" }]) - expect(response.dig("data", "services", "fieldsOfScience")).to eq([{ "count" => 3, - "id" => "computer_and_information_sciences", - "title" => "Computer and information sciences" }]) - 
expect(Base64.urlsafe_decode64(response.dig("data", "services", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) - expect(response.dig("data", "services", "pageInfo", "hasNextPage")).to be false - expect(response.dig("data", "services", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "services", "pidEntities")).to eq( + [{ "count" => 3, "id" => "instrument", "title" => "Instrument" }], + ) + expect(response.dig("data", "services", "fieldsOfScience")).to eq( + [ + { + "count" => 3, + "id" => "computer_and_information_sciences", + "title" => "Computer and information sciences", + }, + ], + ) + expect( + Base64.urlsafe_decode64( + response.dig("data", "services", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(@dois.last.uid) + expect( + response.dig("data", "services", "pageInfo", "hasNextPage"), + ).to be false + expect(response.dig("data", "services", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "services", "nodes").length).to eq(3) service = response.dig("data", "services", "nodes", 0) expect(service.fetch("id")).to eq(@dois.first.identifier) expect(service.fetch("doi")).to eq(@dois.first.uid) - expect(service.fetch("identifiers")).to eq([{ "identifier" => "pk-1234", "identifierType" => "publisher ID" }]) + expect(service.fetch("identifiers")).to eq( + [{ "identifier" => "pk-1234", "identifierType" => "publisher ID" }], + ) expect(service.fetch("types")).to eq("resourceTypeGeneral" => "Service") expect(service.dig("titles", 0, "title")).to eq("Test Service") - expect(service.dig("descriptions", 0, "description")).to eq("Data from: A new malaria agent in African hominids.") - expect(service.dig("fieldsOfScience")).to eq([{ "id" => "computer_and_information_sciences", - "name" => "Computer and information sciences" }]) - expect(service.dig("geolocations", 0, "geolocationPlace")).to eq("Munich, Germany") - 
expect(service.dig("geolocations", 0, "geolocationPoint")).to eq("pointLatitude" => 49.0850736, "pointLongitude" => -123.3300992) + expect(service.dig("descriptions", 0, "description")).to eq( + "Data from: A new malaria agent in African hominids.", + ) + expect(service.dig("fieldsOfScience")).to eq( + [ + { + "id" => "computer_and_information_sciences", + "name" => "Computer and information sciences", + }, + ], + ) + expect(service.dig("geolocations", 0, "geolocationPlace")).to eq( + "Munich, Germany", + ) + expect(service.dig("geolocations", 0, "geolocationPoint")).to eq( + "pointLatitude" => 49.0850736, "pointLongitude" => -123.3300992, + ) end end end diff --git a/spec/graphql/types/software_type_spec.rb b/spec/graphql/types/software_type_spec.rb index 0ca83b83e..9bb2fbcec 100644 --- a/spec/graphql/types/software_type_spec.rb +++ b/spec/graphql/types/software_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe SoftwareType do @@ -9,7 +11,15 @@ end describe "software as formatted citation", elasticsearch: true do - let!(:software) { create(:doi, types: { "resourceTypeGeneral" => "Software" }, doi: "10.14454/12345", aasm_state: "findable", version_info: "1.0.1") } + let!(:software) do + create( + :doi, + types: { "resourceTypeGeneral" => "Software" }, + doi: "10.14454/12345", + aasm_state: "findable", + version_info: "1.0.1", + ) + end before do Doi.import sleep 2 @@ -17,19 +27,23 @@ end let(:query) do - %(query { - software(id: "https://doi.org/10.14454/12345") { + "query { + software(id: \"https://doi.org/10.14454/12345\") { id - formattedCitation(style: "apa") + formattedCitation(style: \"apa\") } - }) + }" end it "returns books" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "software", "id")).to eq("https://handle.test.datacite.org/" + software.uid) - expect(response.dig("data", "software", "formattedCitation")).to eq("Ollomo, B., Durand, P., Prugnolle, F., Douzery, E. J. 
P., Arnathau, C., Nkoghe, D., Leroy, E., & Renaud, F. (2011). Data from: A new malaria agent in African hominids. (Version 1.0.1) [Computer software]. Dryad Digital Repository. https://doi.org/10.14454/12345") + expect(response.dig("data", "software", "id")).to eq( + "https://handle.test.datacite.org/" + software.uid, + ) + expect(response.dig("data", "software", "formattedCitation")).to eq( + "Ollomo, B., Durand, P., Prugnolle, F., Douzery, E. J. P., Arnathau, C., Nkoghe, D., Leroy, E., & Renaud, F. (2011). Data from: A new malaria agent in African hominids. (Version 1.0.1) [Computer software]. Dryad Digital Repository. https://doi.org/10.14454/12345", + ) end end end diff --git a/spec/graphql/types/sound_type_spec.rb b/spec/graphql/types/sound_type_spec.rb index 12344caa1..e0eae8efc 100644 --- a/spec/graphql/types/sound_type_spec.rb +++ b/spec/graphql/types/sound_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe SoundType do diff --git a/spec/graphql/types/title_type_spec.rb b/spec/graphql/types/title_type_spec.rb index 62b588171..3397c36a6 100644 --- a/spec/graphql/types/title_type_spec.rb +++ b/spec/graphql/types/title_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe TitleType do diff --git a/spec/graphql/types/work_type_spec.rb b/spec/graphql/types/work_type_spec.rb index ed869b29d..012297fce 100644 --- a/spec/graphql/types/work_type_spec.rb +++ b/spec/graphql/types/work_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe WorkType do @@ -10,15 +12,20 @@ describe "find work", elasticsearch: true do let!(:work) do - create(:doi, aasm_state: "findable", container: - { "type" => "Journal", - "issue" => "9", - "title" => "Inorganica Chimica Acta", - "volume" => "362", - "lastPage" => "3180", - "firstPage" => "3172", - "identifier" => "0020-1693", - "identifierType" => "ISSN" }) + create( + :doi, + aasm_state: "findable", + container: { 
+ "type" => "Journal", + "issue" => "9", + "title" => "Inorganica Chimica Acta", + "volume" => "362", + "lastPage" => "3180", + "firstPage" => "3172", + "identifier" => "0020-1693", + "identifierType" => "ISSN", + }, + ) end before do @@ -27,8 +34,10 @@ end let(:query) do - %(query { - work(id: "https://doi.org/#{work.doi}") { + "query { + work(id: \"https://doi.org/#{ + work.doi + }\") { id repository { id @@ -49,39 +58,73 @@ xml schemaOrg } - }) + }" end it "returns work" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "work", "id")).to eq("https://handle.test.datacite.org/#{work.doi.downcase}") - expect(response.dig("data", "work", "container")).to eq("identifier" => "0020-1693", "identifierType" => "ISSN", "title" => "Inorganica Chimica Acta") - expect(response.dig("data", "work", "repository", "id")).to eq(work.client_id) - expect(response.dig("data", "work", "repository", "name")).to eq(work.client.name) - expect(response.dig("data", "work", "member", "id")).to eq(work.provider_id) - expect(response.dig("data", "work", "member", "name")).to eq(work.provider.name) - expect(response.dig("data", "work", "id")).to eq("https://handle.test.datacite.org/#{work.doi.downcase}") - - bibtex = BibTeX.parse(response.dig("data", "work", "bibtex")).to_a(quotes: "").first + expect(response.dig("data", "work", "id")).to eq( + "https://handle.test.datacite.org/#{work.doi.downcase}", + ) + expect(response.dig("data", "work", "container")).to eq( + "identifier" => "0020-1693", + "identifierType" => "ISSN", + "title" => "Inorganica Chimica Acta", + ) + expect(response.dig("data", "work", "repository", "id")).to eq( + work.client_id, + ) + expect(response.dig("data", "work", "repository", "name")).to eq( + work.client.name, + ) + expect(response.dig("data", "work", "member", "id")).to eq( + work.provider_id, + ) + expect(response.dig("data", "work", "member", "name")).to eq( + work.provider.name, + ) + expect(response.dig("data", "work", "id")).to eq( 
+ "https://handle.test.datacite.org/#{work.doi.downcase}", + ) + + bibtex = + BibTeX.parse(response.dig("data", "work", "bibtex")).to_a(quotes: ""). + first expect(bibtex[:bibtex_type].to_s).to eq("misc") expect(bibtex[:bibtex_key]).to eq("https://doi.org/#{work.doi.downcase}") - expect(bibtex[:author]).to eq("Ollomo, Benjamin and Durand, Patrick and Prugnolle, Franck and Douzery, Emmanuel J. P. and Arnathau, Céline and Nkoghe, Dieudonné and Leroy, Eric and Renaud, François") - expect(bibtex[:title]).to eq("Data from: A new malaria agent in African hominids.") + expect(bibtex[:author]).to eq( + "Ollomo, Benjamin and Durand, Patrick and Prugnolle, Franck and Douzery, Emmanuel J. P. and Arnathau, Céline and Nkoghe, Dieudonné and Leroy, Eric and Renaud, François", + ) + expect(bibtex[:title]).to eq( + "Data from: A new malaria agent in African hominids.", + ) expect(bibtex[:year]).to eq("2011") schema_org = JSON.parse(response.dig("data", "work", "schemaOrg")) expect(schema_org["@id"]).to eq("https://doi.org/#{work.doi.downcase}") - expect(schema_org["name"]).to eq("Data from: A new malaria agent in African hominids.") - - doc = Nokogiri::XML(response.dig("data", "work", "xml"), nil, "UTF-8", &:noblanks) + expect(schema_org["name"]).to eq( + "Data from: A new malaria agent in African hominids.", + ) + + doc = + Nokogiri.XML( + response.dig("data", "work", "xml"), + nil, + "UTF-8", + &:noblanks + ) expect(doc.at_css("identifier").content).to eq(work.doi) - expect(doc.at_css("titles").content).to eq("Data from: A new malaria agent in African hominids.") + expect(doc.at_css("titles").content).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end describe "find work with claims", elasticsearch: true, vcr: true do - let!(:work) { create(:doi, doi: "10.17863/cam.536", aasm_state: "findable") } + let!(:work) do + create(:doi, doi: "10.17863/cam.536", aasm_state: "findable") + end before do Doi.import @@ -89,8 +132,10 @@ end let(:query) do - %(query { - 
work(id: "https://doi.org/#{work.doi}") { + "query { + work(id: \"https://doi.org/#{ + work.doi + }\") { id claims { sourceId @@ -102,20 +147,36 @@ } } } - }) + }" end it "returns work" do - current_user = User.new(User.generate_token(uid: "0000-0001-5663-772X", aud: "stage")) - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json - - expect(response.dig("data", "work", "id")).to eq("https://handle.test.datacite.org/#{work.doi.downcase}") - expect(response.dig("data", "work", "claims")).to eq([{ "claimed" => "2017-10-16T11:15:01Z", "errorMessages" => [], "sourceId" => "orcid_update", "state" => "done" }]) + current_user = + User.new(User.generate_token(uid: "0000-0001-5663-772X", aud: "stage")) + response = + LupoSchema.execute(query, context: { current_user: current_user }). + as_json + + expect(response.dig("data", "work", "id")).to eq( + "https://handle.test.datacite.org/#{work.doi.downcase}", + ) + expect(response.dig("data", "work", "claims")).to eq( + [ + { + "claimed" => "2017-10-16T11:15:01Z", + "errorMessages" => [], + "sourceId" => "orcid_update", + "state" => "done", + }, + ], + ) end end describe "find work with claims and errors", elasticsearch: true, vcr: true do - let!(:work) { create(:doi, doi: "10.70048/sc61-b496", aasm_state: "findable") } + let!(:work) do + create(:doi, doi: "10.70048/sc61-b496", aasm_state: "findable") + end before do Doi.import @@ -123,8 +184,10 @@ end let(:query) do - %(query { - work(id: "https://doi.org/#{work.doi}") { + "query { + work(id: \"https://doi.org/#{ + work.doi + }\") { id claims { sourceId @@ -136,23 +199,41 @@ } } } - }) + }" end it "returns work" do - current_user = User.new(User.generate_token(uid: "0000-0002-7352-517X", aud: "stage")) - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json - - expect(response.dig("data", "work", "id")).to eq("https://handle.test.datacite.org/#{work.doi.downcase}") - expect(response.dig("data", "work", 
"claims")).to eq([{ "claimed" => nil, "errorMessages" => [{ "status" => nil, "title" => "Missing data" }], "sourceId" => "orcid_update", "state" => "failed" }]) + current_user = + User.new(User.generate_token(uid: "0000-0002-7352-517X", aud: "stage")) + response = + LupoSchema.execute(query, context: { current_user: current_user }). + as_json + + expect(response.dig("data", "work", "id")).to eq( + "https://handle.test.datacite.org/#{work.doi.downcase}", + ) + expect(response.dig("data", "work", "claims")).to eq( + [ + { + "claimed" => nil, + "errorMessages" => [{ "status" => nil, "title" => "Missing data" }], + "sourceId" => "orcid_update", + "state" => "failed", + }, + ], + ) end end describe "find work crossref", elasticsearch: true, vcr: true do let!(:work) do - create(:doi, doi: "10.1038/nature12373", agency: "crossref", aasm_state: "findable", titles: [ - { "title" => "Nanometre-scale thermometry in a living cell" }, - ]) + create( + :doi, + doi: "10.1038/nature12373", + agency: "crossref", + aasm_state: "findable", + titles: [{ "title" => "Nanometre-scale thermometry in a living cell" }], + ) end before do @@ -161,8 +242,10 @@ end let(:query) do - %(query { - work(id: "https://doi.org/#{work.doi}") { + "query { + work(id: \"https://doi.org/#{ + work.doi + }\") { id titles { title @@ -170,23 +253,29 @@ url contentUrl } - }) + }" end it "returns work" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "work", "id")).to eq("https://handle.test.datacite.org/#{work.doi.downcase}") - expect(response.dig("data", "work", "titles")).to eq([{ "title" => "Nanometre-scale thermometry in a living cell" }]) + expect(response.dig("data", "work", "id")).to eq( + "https://handle.test.datacite.org/#{work.doi.downcase}", + ) + expect(response.dig("data", "work", "titles")).to eq( + [{ "title" => "Nanometre-scale thermometry in a living cell" }], + ) expect(response.dig("data", "work", "url")).to eq(work.url) - expect(response.dig("data", "work", 
"contentUrl")).to eq("https://dash.harvard.edu/bitstream/1/12285462/1/Nanometer-Scale%20Thermometry.pdf") + expect(response.dig("data", "work", "contentUrl")).to eq( + "https://dash.harvard.edu/bitstream/1/12285462/1/Nanometer-Scale%20Thermometry.pdf", + ) end end describe "find work not found", elasticsearch: true do let(:query) do - %(query { - work(id: "https://doi.org/10.14454/xxx") { + "query { + work(id: \"https://doi.org/10.14454/xxx\") { id repository { id @@ -207,20 +296,28 @@ xml schemaOrg } - }) + }" end it "returns error" do response = LupoSchema.execute(query).as_json expect(response.dig("data")).to be_nil - expect(response.dig("errors")).to eq([{ "locations" => [{ "column" => 9, "line" => 2 }], "message" => "Record not found", "path" => ["work"] }]) + expect(response.dig("errors")).to eq( + [ + { + "locations" => [{ "column" => 9, "line" => 2 }], + "message" => "Record not found", + "path" => %w[work], + }, + ], + ) end end describe "query works", elasticsearch: true, vcr: true do let(:query) do - %(query($first: Int, $cursor: String) { + "query($first: Int, $cursor: String) { works(first: $first, after: $cursor) { totalCount totalCountFromCrossref @@ -276,65 +373,110 @@ count } } - }) + }" end - let(:provider) { create(:provider, symbol: "LPSW", ror_id: "https://ror.org/013meh722") } + let(:provider) do + create(:provider, symbol: "LPSW", ror_id: "https://ror.org/013meh722") + end let(:provider_without_ror) { create(:provider, ror_id: nil) } let(:client) { create(:client, provider: provider) } let(:client_without_ror) { create(:client, provider: provider_without_ror) } let!(:works) do - create_list(:doi, 10, aasm_state: "findable", client: client_without_ror, creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - }]) 
+ create_list( + :doi, + 10, + aasm_state: "findable", + client: client_without_ror, + creators: [ + { + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + }, + ], + ) end let!(:doi) do - create(:doi, aasm_state: "findable", client: client_without_ror, creators: - [{ - "familyName" => "Garza", - "givenName" => "Kristian", - "name" => "Garza, Kristian", - "nameIdentifiers" => [{ "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme" => "ORCID", "schemeUri" => "https://orcid.org" }], - "nameType" => "Personal", - "affiliation": [ + create( + :doi, + aasm_state: "findable", + client: client_without_ror, + creators: [ { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", + "familyName" => "Garza", + "givenName" => "Kristian", + "name" => "Garza, Kristian", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://orcid.org/0000-0003-3484-6875", + "nameIdentifierScheme" => "ORCID", + "schemeUri" => "https://orcid.org", + }, + ], + "nameType" => "Personal", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], + }, + { + "name" => "University of Cambridge", + "affiliation": [ + { + "name": "University of Cambridge", + "affiliationIdentifier": "https://ror.org/013meh722", + "affiliationIdentifierScheme": "ROR", + }, + ], }, ], - }, - { - "name" => "University of Cambridge", - "affiliation": [ - { - "name": "University of Cambridge", - "affiliationIdentifier": "https://ror.org/013meh722", - "affiliationIdentifierScheme": "ROR", - }, - ], - }]) + ) end let!(:organization_doi) do - create(:doi, aasm_state: "findable", 
client: client_without_ror, creators: - [{ - "name" => "Department of Psychoceramics, University of Cambridge", - "nameIdentifiers" => [{ "nameIdentifier" => "https://ror.org/013meh722", "nameIdentifierScheme" => "ROR", "schemeUri" => "https://ror.org" }], - "nameType" => "Organizational", - }]) + create( + :doi, + aasm_state: "findable", + client: client_without_ror, + creators: [ + { + "name" => "Department of Psychoceramics, University of Cambridge", + "nameIdentifiers" => [ + { + "nameIdentifier" => "https://ror.org/013meh722", + "nameIdentifierScheme" => "ROR", + "schemeUri" => "https://ror.org", + }, + ], + "nameType" => "Organizational", + }, + ], + ) end let!(:funder_doi) do - create(:doi, aasm_state: "findable", client: client_without_ror, funding_references: - [{ - "funderIdentifier" => "https://doi.org/10.13039/501100000735", - "funderIdentifierType" => "Crossref Funder ID", - "funderName" => "University of Cambridge", - }]) + create( + :doi, + aasm_state: "findable", + client: client_without_ror, + funding_references: [ + { + "funderIdentifier" => "https://doi.org/10.13039/501100000735", + "funderIdentifierType" => "Crossref Funder ID", + "funderName" => "University of Cambridge", + }, + ], + ) end let!(:member_doi) { create(:doi, aasm_state: "findable", client: client) } @@ -345,51 +487,101 @@ end it "returns all works" do - response = LupoSchema.execute(query, variables: { first: 4, cursor: nil }).as_json + response = + LupoSchema.execute(query, variables: { first: 4, cursor: nil }).as_json expect(response.dig("data", "works", "totalCount")).to eq(14) - expect(response.dig("data", "works", "totalCountFromCrossref")).to eq(116990655) - expect(Base64.urlsafe_decode64(response.dig("data", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(@works[3].uid) - expect(response.dig("data", "works", "pageInfo", "hasNextPage")).to be true + expect(response.dig("data", "works", "totalCountFromCrossref")).to eq( + 116_990_655, + ) + expect( + 
Base64.urlsafe_decode64( + response.dig("data", "works", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(@works[3].uid) + expect( + response.dig("data", "works", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "works", "nodes").length).to eq(4) - expect(response.dig("data", "works", "nodes", 0, "id")).to eq(@works[0].identifier) - expect(response.dig("data", "works", "nodes", 0, "creators", 1, "type")).to be nil + expect(response.dig("data", "works", "nodes", 0, "id")).to eq( + @works[0].identifier, + ) + expect( + response.dig("data", "works", "nodes", 0, "creators", 1, "type"), + ).to be nil end_cursor = response.dig("data", "works", "pageInfo", "endCursor") - response = LupoSchema.execute(query, variables: { first: 4, cursor: end_cursor }).as_json + response = + LupoSchema.execute(query, variables: { first: 4, cursor: end_cursor }). + as_json expect(response.dig("data", "works", "totalCount")).to eq(14) - expect(Base64.urlsafe_decode64(response.dig("data", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(@works[7].uid) - expect(response.dig("data", "works", "pageInfo", "hasNextPage")).to be true + expect( + Base64.urlsafe_decode64( + response.dig("data", "works", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(@works[7].uid) + expect( + response.dig("data", "works", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "works", "nodes").length).to eq(4) - expect(response.dig("data", "works", "nodes", 0, "id")).to eq(@works[4].identifier) + expect(response.dig("data", "works", "nodes", 0, "id")).to eq( + @works[4].identifier, + ) end_cursor = response.dig("data", "works", "pageInfo", "endCursor") - response = LupoSchema.execute(query, variables: { first: 4, cursor: end_cursor }).as_json + response = + LupoSchema.execute(query, variables: { first: 4, cursor: end_cursor }). 
+ as_json expect(response.dig("data", "works", "totalCount")).to eq(14) - expect(Base64.urlsafe_decode64(response.dig("data", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(@works[11].uid) - expect(response.dig("data", "works", "pageInfo", "hasNextPage")).to be true + expect( + Base64.urlsafe_decode64( + response.dig("data", "works", "pageInfo", "endCursor"), + ). + split(",", 2). + last, + ).to eq(@works[11].uid) + expect( + response.dig("data", "works", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "works", "nodes").length).to eq(4) - expect(response.dig("data", "works", "nodes", 0, "id")).to eq(@works[8].identifier) + expect(response.dig("data", "works", "nodes", 0, "id")).to eq( + @works[8].identifier, + ) end_cursor = response.dig("data", "works", "pageInfo", "endCursor") expect(response.dig("data", "associated", "totalCount")).to eq(4) - expect(response.dig("data", "associated", "published")).to eq([{ "count" => 4, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "associated", "published")).to eq( + [{ "count" => 4, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "contributed", "totalCount")).to eq(1) - expect(response.dig("data", "contributed", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "contributed", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "affiliated", "totalCount")).to eq(3) - expect(response.dig("data", "affiliated", "published")).to eq([{ "count" => 3, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "affiliated", "published")).to eq( + [{ "count" => 3, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "funded", "totalCount")).to eq(1) - expect(response.dig("data", "funded", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "funded", "published")).to 
eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) expect(response.dig("data", "hosted", "totalCount")).to eq(1) - expect(response.dig("data", "hosted", "published")).to eq([{ "count" => 1, "id" => "2011", "title" => "2011" }]) + expect(response.dig("data", "hosted", "published")).to eq( + [{ "count" => 1, "id" => "2011", "title" => "2011" }], + ) end end - describe "query works by registration agency", elasticsearch: true, vcr: true do + describe "query works by registration agency", + elasticsearch: true, vcr: true do let(:query) do - %(query($first: Int, $cursor: String, $registrationAgency: String) { + "query($first: Int, $cursor: String, $registrationAgency: String) { works(first: $first, after: $cursor, registrationAgency: $registrationAgency) { totalCount totalCountFromCrossref @@ -421,11 +613,19 @@ } } } - }) + }" end - let!(:works) { create_list(:doi, 10, aasm_state: "findable", language: "nl", agency: "datacite") } - let!(:work) { create(:doi, aasm_state: "findable", language: "de", agency: "crossref") } + let!(:works) do + create_list( + :doi, + 10, + aasm_state: "findable", language: "nl", agency: "datacite", + ) + end + let!(:work) do + create(:doi, aasm_state: "findable", language: "de", agency: "crossref") + end before do Doi.import @@ -434,24 +634,43 @@ end it "returns all works" do - response = LupoSchema.execute(query, variables: { first: 4, cursor: nil, registrationAgency: "datacite" }).as_json + response = + LupoSchema.execute( + query, + variables: { first: 4, cursor: nil, registrationAgency: "datacite" }, + ). 
+ as_json expect(response.dig("data", "works", "totalCount")).to eq(10) - expect(response.dig("data", "works", "totalCountFromCrossref")).to eq(116990655) - expect(response.dig("data", "works", "registrationAgencies")).to eq([{ "count" => 10, "id" => "datacite", "title" => "DataCite" }]) - expect(response.dig("data", "works", "languages")).to eq([{ "count" => 10, "id" => "nl", "title" => "Dutch" }]) + expect(response.dig("data", "works", "totalCountFromCrossref")).to eq( + 116_990_655, + ) + expect(response.dig("data", "works", "registrationAgencies")).to eq( + [{ "count" => 10, "id" => "datacite", "title" => "DataCite" }], + ) + expect(response.dig("data", "works", "languages")).to eq( + [{ "count" => 10, "id" => "nl", "title" => "Dutch" }], + ) # expect(Base64.urlsafe_decode64(response.dig("data", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(@works[3].uid) - expect(response.dig("data", "works", "pageInfo", "hasNextPage")).to be true + expect( + response.dig("data", "works", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "works", "nodes").length).to eq(4) - expect(response.dig("data", "works", "nodes", 0, "registered")).to start_with(@works[0].registered[0..9]) - expect(response.dig("data", "works", "nodes", 0, "language")).to eq("id" => "nl", "name" => "Dutch") - expect(response.dig("data", "works", "nodes", 0, "registrationAgency")).to eq("id" => "datacite", "name" => "DataCite") + expect( + response.dig("data", "works", "nodes", 0, "registered"), + ).to start_with(@works[0].registered[0..9]) + expect(response.dig("data", "works", "nodes", 0, "language")).to eq( + "id" => "nl", "name" => "Dutch", + ) + expect( + response.dig("data", "works", "nodes", 0, "registrationAgency"), + ).to eq("id" => "datacite", "name" => "DataCite") end end describe "query works by license", elasticsearch: true do let(:query) do - %(query($first: Int, $cursor: String, $license: String) { + "query($first: Int, $cursor: String, $license: String) { 
works(first: $first, after: $cursor, license: $license) { totalCount pageInfo { @@ -478,16 +697,21 @@ } } } - }) + }" end let!(:works) do - create_list(:doi, 10, aasm_state: "findable", agency: "datacite", subjects: - [{ - "subject" => "Computer and information sciences", - }]) + create_list( + :doi, + 10, + aasm_state: "findable", + agency: "datacite", + subjects: [{ "subject" => "Computer and information sciences" }], + ) + end + let!(:work) do + create(:doi, aasm_state: "findable", agency: "crossref", rights_list: []) end - let!(:work) { create(:doi, aasm_state: "findable", agency: "crossref", rights_list: []) } before do Doi.import @@ -496,26 +720,57 @@ end it "returns all works" do - response = LupoSchema.execute(query, variables: { first: 4, cursor: nil, license: "cc0-1.0" }).as_json + response = + LupoSchema.execute( + query, + variables: { first: 4, cursor: nil, license: "cc0-1.0" }, + ). + as_json expect(response.dig("data", "works", "totalCount")).to eq(10) - expect(response.dig("data", "works", "licenses")).to eq([{ "count" => 10, "id" => "cc0-1.0", "title" => "CC0-1.0" }]) + expect(response.dig("data", "works", "licenses")).to eq( + [{ "count" => 10, "id" => "cc0-1.0", "title" => "CC0-1.0" }], + ) # expect(Base64.urlsafe_decode64(response.dig("data", "works", "pageInfo", "endCursor")).split(",", 2).last).to eq(@works[3].uid) - expect(response.dig("data", "works", "pageInfo", "hasNextPage")).to be true + expect( + response.dig("data", "works", "pageInfo", "hasNextPage"), + ).to be true expect(response.dig("data", "works", "nodes").length).to eq(4) - expect(response.dig("data", "works", "nodes", 0, "id")).to eq(@works[0].identifier) - expect(response.dig("data", "works", "nodes", 0, "registered")).to start_with(@works[0].registered[0..9]) - expect(response.dig("data", "works", "nodes", 0, "subjects")).to eq([{ "subject" => "Computer and information sciences", "subjectScheme" => nil }, { "subject" => "FOS: Computer and information sciences", 
"subjectScheme" => "Fields of Science and Technology (FOS)" }]) - expect(response.dig("data", "works", "nodes", 0, "rights")).to eq([{ "rights" => "Creative Commons Zero v1.0 Universal", - "rightsIdentifier" => "cc0-1.0", - "rightsUri" => "https://creativecommons.org/publicdomain/zero/1.0/legalcode" }]) + expect(response.dig("data", "works", "nodes", 0, "id")).to eq( + @works[0].identifier, + ) + expect( + response.dig("data", "works", "nodes", 0, "registered"), + ).to start_with(@works[0].registered[0..9]) + expect(response.dig("data", "works", "nodes", 0, "subjects")).to eq( + [ + { + "subject" => "Computer and information sciences", + "subjectScheme" => nil, + }, + { + "subject" => "FOS: Computer and information sciences", + "subjectScheme" => "Fields of Science and Technology (FOS)", + }, + ], + ) + expect(response.dig("data", "works", "nodes", 0, "rights")).to eq( + [ + { + "rights" => "Creative Commons Zero v1.0 Universal", + "rightsIdentifier" => "cc0-1.0", + "rightsUri" => + "https://creativecommons.org/publicdomain/zero/1.0/legalcode", + }, + ], + ) end end describe "create claim", elasticsearch: true, vcr: true do let(:query) do - %(mutation { - createClaim(doi: "10.5438/4hr0-d640", id: "d140d44e-af70-43ec-a90b-49878a954487", sourceId: "orcid_update") { + "mutation { + createClaim(doi: \"10.5438/4hr0-d640\", id: \"d140d44e-af70-43ec-a90b-49878a954487\", sourceId: \"orcid_update\") { claim { id state @@ -530,62 +785,99 @@ title } } - }) + }" end it "returns claim" do - current_user = User.new(User.generate_token(uid: "0000-0001-6528-2027", name: "Martin Fenner", has_orcid_token: true)) - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json - - expect(response.dig("data", "createClaim", "claim", "id")).to eq("d140d44e-af70-43ec-a90b-49878a954487") - expect(response.dig("data", "createClaim", "claim", "sourceId")).to eq("orcid_update") - expect(response.dig("data", "createClaim", "claim", "state")).to eq("failed") - 
expect(response.dig("data", "createClaim", "claim", "errorMessages")).to eq([{ "title" => "Missing data" }]) + current_user = + User.new( + User.generate_token( + uid: "0000-0001-6528-2027", + name: "Martin Fenner", + has_orcid_token: true, + ), + ) + response = + LupoSchema.execute(query, context: { current_user: current_user }). + as_json + + expect(response.dig("data", "createClaim", "claim", "id")).to eq( + "d140d44e-af70-43ec-a90b-49878a954487", + ) + expect(response.dig("data", "createClaim", "claim", "sourceId")).to eq( + "orcid_update", + ) + expect(response.dig("data", "createClaim", "claim", "state")).to eq( + "failed", + ) + expect( + response.dig("data", "createClaim", "claim", "errorMessages"), + ).to eq([{ "title" => "Missing data" }]) expect(response.dig("data", "createClaim", "errors")).to be_empty end end describe "delete claim", elasticsearch: true, vcr: true do let(:query) do - %(mutation { - deleteClaim(id: "d140d44e-af70-43ec-a90b-49878a954487") { + "mutation { + deleteClaim(id: \"d140d44e-af70-43ec-a90b-49878a954487\") { message errors { status title } } - }) + }" end it "returns success message" do - current_user = User.new(User.generate_token(uid: "0000-0001-6528-2027", aud: "stage", has_orcid_token: true)) - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json - - expect(response.dig("data", "deleteClaim", "message")).to eq("Claim d140d44e-af70-43ec-a90b-49878a954487 deleted.") + current_user = + User.new( + User.generate_token( + uid: "0000-0001-6528-2027", aud: "stage", has_orcid_token: true, + ), + ) + response = + LupoSchema.execute(query, context: { current_user: current_user }). 
+ as_json + + expect(response.dig("data", "deleteClaim", "message")).to eq( + "Claim d140d44e-af70-43ec-a90b-49878a954487 deleted.", + ) expect(response.dig("data", "deleteClaim", "errors")).to be_blank end end describe "delete claim not found", elasticsearch: true, vcr: true do let(:query) do - %(mutation { - deleteClaim(id: "6dcaeca5-7e5a-449a-86b8-f2ae80db3fef") { + "mutation { + deleteClaim(id: \"6dcaeca5-7e5a-449a-86b8-f2ae80db3fef\") { message errors { status title } } - }) + }" end it "returns error message" do - current_user = User.new(User.generate_token(uid: "0000-0001-6528-2027", aud: "stage", has_orcid_token: true)) - response = LupoSchema.execute(query, context: { current_user: current_user }).as_json - - expect(response.dig("data", "deleteClaim", "message")).to eq("Error deleting claim 6dcaeca5-7e5a-449a-86b8-f2ae80db3fef.") - expect(response.dig("data", "deleteClaim", "errors")).to eq([{ "status" => 404, "title" => "Not found" }]) + current_user = + User.new( + User.generate_token( + uid: "0000-0001-6528-2027", aud: "stage", has_orcid_token: true, + ), + ) + response = + LupoSchema.execute(query, context: { current_user: current_user }). 
+ as_json + + expect(response.dig("data", "deleteClaim", "message")).to eq( + "Error deleting claim 6dcaeca5-7e5a-449a-86b8-f2ae80db3fef.", + ) + expect(response.dig("data", "deleteClaim", "errors")).to eq( + [{ "status" => 404, "title" => "Not found" }], + ) end end end diff --git a/spec/graphql/types/workflow_type_spec.rb b/spec/graphql/types/workflow_type_spec.rb index a0d8197ae..ed756f005 100644 --- a/spec/graphql/types/workflow_type_spec.rb +++ b/spec/graphql/types/workflow_type_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe WorkflowType do diff --git a/spec/jobs/activity_import_by_id_job_spec.rb b/spec/jobs/activity_import_by_id_job_spec.rb index 9df6fdb15..6bceabe90 100644 --- a/spec/jobs/activity_import_by_id_job_spec.rb +++ b/spec/jobs/activity_import_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ActivityImportByIdJob, type: :job do @@ -5,8 +7,9 @@ subject(:job) { ActivityImportByIdJob.perform_later(activity.id) } it "queues the job" do - expect { job }.to have_enqueued_job(ActivityImportByIdJob). - on_queue("test_lupo_background") + expect { job }.to have_enqueued_job(ActivityImportByIdJob).on_queue( + "test_lupo_background", + ) end after do diff --git a/spec/jobs/datacite_doi_import_by_id_job_spec.rb b/spec/jobs/datacite_doi_import_by_id_job_spec.rb index ce04afd44..85bad8c56 100644 --- a/spec/jobs/datacite_doi_import_by_id_job_spec.rb +++ b/spec/jobs/datacite_doi_import_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataciteDoiImportByIdJob, type: :job do @@ -5,8 +7,9 @@ subject(:job) { DataciteDoiImportByIdJob.perform_later(doi.id) } it "queues the job" do - expect { job }.to have_enqueued_job(DataciteDoiImportByIdJob). 
- on_queue("test_lupo_import") + expect { job }.to have_enqueued_job(DataciteDoiImportByIdJob).on_queue( + "test_lupo_import", + ) end after do diff --git a/spec/jobs/event_import_by_id_job_spec.rb b/spec/jobs/event_import_by_id_job_spec.rb index 77173b1ec..46435ee54 100644 --- a/spec/jobs/event_import_by_id_job_spec.rb +++ b/spec/jobs/event_import_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe EventImportByIdJob, type: :job, vcr: true do @@ -5,8 +7,9 @@ subject(:job) { EventImportByIdJob.perform_later(event.id) } it "queues the job" do - expect { job }.to have_enqueued_job(EventImportByIdJob). - on_queue("test_lupo_background") + expect { job }.to have_enqueued_job(EventImportByIdJob).on_queue( + "test_lupo_background", + ) end after do diff --git a/spec/jobs/event_registrant_update_by_id_job_spec.rb b/spec/jobs/event_registrant_update_by_id_job_spec.rb index 6956f78d6..07be2678e 100644 --- a/spec/jobs/event_registrant_update_by_id_job_spec.rb +++ b/spec/jobs/event_registrant_update_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe EventRegistrantUpdateByIdJob, type: :job, vcr: true do @@ -5,8 +7,9 @@ subject(:job) { EventRegistrantUpdateByIdJob.perform_later(event.uuid) } it "queues the job" do - expect { job }.to have_enqueued_job(EventRegistrantUpdateByIdJob). - on_queue("test_lupo_background") + expect { job }.to have_enqueued_job(EventRegistrantUpdateByIdJob).on_queue( + "test_lupo_background", + ) end after do diff --git a/spec/jobs/handle_job_spec.rb b/spec/jobs/handle_job_spec.rb index 061f29ab5..a485d929b 100644 --- a/spec/jobs/handle_job_spec.rb +++ b/spec/jobs/handle_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe HandleJob, type: :job do @@ -5,8 +7,7 @@ subject(:job) { HandleJob.perform_later(doi.doi) } it "queues the job" do - expect { job }.to have_enqueued_job(HandleJob). 
- on_queue("test_lupo") + expect { job }.to have_enqueued_job(HandleJob).on_queue("test_lupo") end after do diff --git a/spec/jobs/import_doi_job_spec.rb b/spec/jobs/import_doi_job_spec.rb index 1c6572fb5..17cc4b550 100644 --- a/spec/jobs/import_doi_job_spec.rb +++ b/spec/jobs/import_doi_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ImportDoiJob, type: :job do @@ -5,8 +7,11 @@ subject(:job) { ImportDoiJob.perform_later(doi.doi) } it "queues the job" do - expect { job }.to have_enqueued_job(ImportDoiJob). - on_queue("test_lupo_background").at_least(1).times + expect { job }.to have_enqueued_job(ImportDoiJob).on_queue( + "test_lupo_background", + ). + at_least(1). + times end after do diff --git a/spec/jobs/index_background_job_spec.rb b/spec/jobs/index_background_job_spec.rb index 4cb8724fc..a16663fd2 100644 --- a/spec/jobs/index_background_job_spec.rb +++ b/spec/jobs/index_background_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe IndexBackgroundJob, type: :job do @@ -5,8 +7,11 @@ subject(:job) { IndexBackgroundJob.perform_later(doi) } it "queues the job" do - expect { job }.to have_enqueued_job(IndexBackgroundJob). - on_queue("test_lupo_background").at_least(1).times + expect { job }.to have_enqueued_job(IndexBackgroundJob).on_queue( + "test_lupo_background", + ). + at_least(1). + times end after do diff --git a/spec/jobs/index_job_spec.rb b/spec/jobs/index_job_spec.rb index 83357e200..1f3cd247c 100644 --- a/spec/jobs/index_job_spec.rb +++ b/spec/jobs/index_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe IndexJob, type: :job do @@ -5,8 +7,9 @@ subject(:job) { IndexJob.perform_later(doi) } it "queues the job" do - expect { job }.to have_enqueued_job(IndexJob). - on_queue("test_lupo").at_least(1).times + expect { job }.to have_enqueued_job(IndexJob).on_queue("test_lupo"). + at_least(1). 
+ times end after do diff --git a/spec/jobs/orcid_auto_update_by_id_job_spec.rb b/spec/jobs/orcid_auto_update_by_id_job_spec.rb index 242f28321..3e88567f7 100644 --- a/spec/jobs/orcid_auto_update_by_id_job_spec.rb +++ b/spec/jobs/orcid_auto_update_by_id_job_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # describe OrcidAutoUpdateByIdJob, type: :job do diff --git a/spec/jobs/other_doi_by_id_job.rb b/spec/jobs/other_doi_by_id_job.rb index 83801754b..031398c6b 100644 --- a/spec/jobs/other_doi_by_id_job.rb +++ b/spec/jobs/other_doi_by_id_job.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe OtherDoiByIdJob, type: :job do @@ -5,8 +7,11 @@ subject(:job) { OtherDoiByIdJob.perform_later(doi) } it "queues the job" do - expect { job }.to have_enqueued_job(OtherDoiByIdJob). - on_queue("test_lupo_background").at_least(1).times + expect { job }.to have_enqueued_job(OtherDoiByIdJob).on_queue( + "test_lupo_background", + ). + at_least(1). + times end after do diff --git a/spec/jobs/other_doi_import_by_id_job_spec.rb b/spec/jobs/other_doi_import_by_id_job_spec.rb index 058013693..ea396a736 100644 --- a/spec/jobs/other_doi_import_by_id_job_spec.rb +++ b/spec/jobs/other_doi_import_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe OtherDoiImportByIdJob, type: :job do @@ -5,8 +7,9 @@ subject(:job) { OtherDoiImportByIdJob.perform_later(doi.id) } it "queues the job" do - expect { job }.to have_enqueued_job(OtherDoiImportByIdJob). 
- on_queue("test_lupo_import_other_doi") + expect { job }.to have_enqueued_job(OtherDoiImportByIdJob).on_queue( + "test_lupo_import_other_doi", + ) end after do diff --git a/spec/jobs/target_doi_by_id_job_spec.rb b/spec/jobs/target_doi_by_id_job_spec.rb index 4de07b4a3..00fcad6fb 100644 --- a/spec/jobs/target_doi_by_id_job_spec.rb +++ b/spec/jobs/target_doi_by_id_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe TargetDoiByIdJob, type: :job do @@ -5,8 +7,11 @@ subject(:job) { TargetDoiByIdJob.perform_later(doi) } it "queues the job" do - expect { job }.to have_enqueued_job(TargetDoiByIdJob). - on_queue("test_lupo_background").at_least(1).times + expect { job }.to have_enqueued_job(TargetDoiByIdJob).on_queue( + "test_lupo_background", + ). + at_least(1). + times end after do diff --git a/spec/jobs/update_doi_job_spec.rb b/spec/jobs/update_doi_job_spec.rb index 38846a7a7..73f724dc4 100644 --- a/spec/jobs/update_doi_job_spec.rb +++ b/spec/jobs/update_doi_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe UpdateDoiJob, type: :job do @@ -5,8 +7,11 @@ subject(:job) { UpdateDoiJob.perform_later(doi.doi) } it "queues the job" do - expect { job }.to have_enqueued_job(UpdateDoiJob). - on_queue("test_lupo_background").at_least(1).times + expect { job }.to have_enqueued_job(UpdateDoiJob).on_queue( + "test_lupo_background", + ). + at_least(1). + times end after do diff --git a/spec/jobs/update_state_job_spec.rb b/spec/jobs/update_state_job_spec.rb index 8eee44054..65326b1a0 100644 --- a/spec/jobs/update_state_job_spec.rb +++ b/spec/jobs/update_state_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe UpdateStateJob, type: :job do @@ -5,8 +7,9 @@ subject(:job) { UpdateStateJob.perform_later(doi.doi) } it "queues the job" do - expect { job }.to have_enqueued_job(UpdateStateJob). 
- on_queue("test_lupo_background") + expect { job }.to have_enqueued_job(UpdateStateJob).on_queue( + "test_lupo_background", + ) end after do diff --git a/spec/jobs/url_job_spec.rb b/spec/jobs/url_job_spec.rb index e0a360c32..8dfa7baa0 100644 --- a/spec/jobs/url_job_spec.rb +++ b/spec/jobs/url_job_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe UrlJob, type: :job do @@ -5,8 +7,7 @@ subject(:job) { UrlJob.perform_later(doi.doi) } it "queues the job" do - expect { job }.to have_enqueued_job(UrlJob). - on_queue("test_lupo") + expect { job }.to have_enqueued_job(UrlJob).on_queue("test_lupo") end after do diff --git a/spec/lib/tasks/datacite_doi_rake_spec.rb b/spec/lib/tasks/datacite_doi_rake_spec.rb index 3efe54bc1..16a19bbc2 100644 --- a/spec/lib/tasks/datacite_doi_rake_spec.rb +++ b/spec/lib/tasks/datacite_doi_rake_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" # describe "datacite_doi:create_index", order: :defined do diff --git a/spec/lib/tasks/doi_rake_spec.rb b/spec/lib/tasks/doi_rake_spec.rb index ea0ae7d73..06bbc859e 100644 --- a/spec/lib/tasks/doi_rake_spec.rb +++ b/spec/lib/tasks/doi_rake_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" # describe "doi:create_index", order: :defined do @@ -77,7 +79,9 @@ include_context "rake" let!(:doi) { create(:doi, aasm_state: "findable") } - let(:output) { "0 DOIs found that are not registered in the Handle system.\n" } + let(:output) do + "0 DOIs found that are not registered in the Handle system.\n" + end it "prerequisites should include environment" do expect(subject.prerequisites).to include("environment") @@ -125,7 +129,9 @@ include_context "rake" let!(:doi) { create(:doi, aasm_state: "findable") } - let(:output) { "[SetRegistrationAgency] 0 Dois with [SetRegistrationAgency].\n" } + let(:output) do + "[SetRegistrationAgency] 0 Dois with [SetRegistrationAgency].\n" + end it "prerequisites should include 
environment" do expect(subject.prerequisites).to include("environment") @@ -253,22 +259,25 @@ include_context "rake" let(:doi) do - create(:doi, aasm_state: "findable", landing_page: - { - "checked" => Time.zone.now.utc.iso8601, - "status" => 200, - "url" => "https://example.org", - "contentType" => "text/html", - "error" => nil, - "redirectCount" => 0, - "redirectUrls" => [], - "downloadLatency" => 200, - "hasSchemaOrg" => true, - "schemaOrgId" => "10.14454/10703", - "dcIdentifier" => nil, - "citationDoi" => nil, - "bodyHasPid" => true, - }) + create( + :doi, + aasm_state: "findable", + landing_page: { + "checked" => Time.zone.now.utc.iso8601, + "status" => 200, + "url" => "https://example.org", + "contentType" => "text/html", + "error" => nil, + "redirectCount" => 0, + "redirectUrls" => [], + "downloadLatency" => 200, + "hasSchemaOrg" => true, + "schemaOrgId" => "10.14454/10703", + "dcIdentifier" => nil, + "citationDoi" => nil, + "bodyHasPid" => true, + }, + ) end let(:output) { "Updated landing page data for DOI #{doi.doi}\n" } diff --git a/spec/lib/tasks/other_doi_rake_spec.rb b/spec/lib/tasks/other_doi_rake_spec.rb index cf1d763df..7fc2c1a00 100644 --- a/spec/lib/tasks/other_doi_rake_spec.rb +++ b/spec/lib/tasks/other_doi_rake_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" # describe "other_doi:create_index", order: :defined do diff --git a/spec/models/ability_spec.rb b/spec/models/ability_spec.rb index b8b58102d..2bdfa4a09 100644 --- a/spec/models/ability_spec.rb +++ b/spec/models/ability_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" require "cancan/matchers" @@ -5,11 +7,20 @@ let(:token) { User.generate_token } let(:user) { User.new(token) } let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM") } - let(:provider) { create(:provider, consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION") } + let(:provider) do + create( + :provider, + consortium: consortium, role_name: 
"ROLE_CONSORTIUM_ORGANIZATION", + ) + end let(:client) { create(:client, provider: provider) } let(:prefix) { create(:prefix, uid: "10.14454") } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix) } - let(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } + let!(:client_prefix) do + create(:client_prefix, client: client, prefix: prefix) + end + let(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end let(:doi) { create(:doi, client: client) } let(:media) { create(:media, doi: doi) } let(:xml) { file_fixture("datacite.xml").read } @@ -53,7 +64,13 @@ end context "when is a client admin" do - let(:token) { User.generate_token(role_id: "client_admin", provider_id: provider.symbol.downcase, client_id: client.symbol.downcase) } + let(:token) do + User.generate_token( + role_id: "client_admin", + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + ) + end it { is_expected.to be_able_to(:read, user) } it { is_expected.to be_able_to(:read, provider) } @@ -86,7 +103,13 @@ end context "when is a client user" do - let(:token) { User.generate_token(role_id: "client_user", provider_id: provider.symbol.downcase, client_id: client.symbol.downcase) } + let(:token) do + User.generate_token( + role_id: "client_user", + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + ) + end it { is_expected.to be_able_to(:read, user) } it { is_expected.to be_able_to(:read, provider) } @@ -119,7 +142,11 @@ end context "when is a provider admin" do - let(:token) { User.generate_token(role_id: "provider_admin", provider_id: provider.symbol.downcase) } + let(:token) do + User.generate_token( + role_id: "provider_admin", provider_id: provider.symbol.downcase, + ) + end it { is_expected.to be_able_to(:read, user) } @@ -152,7 +179,11 @@ end context "when is a consortium admin" do - let(:token) { User.generate_token(role_id: "consortium_admin", provider_id: 
consortium.symbol.downcase) } + let(:token) do + User.generate_token( + role_id: "consortium_admin", provider_id: consortium.symbol.downcase, + ) + end it { is_expected.to be_able_to(:read, user) } @@ -190,7 +221,11 @@ end context "when is a provider user" do - let(:token) { User.generate_token(role_id: "provider_user", provider_id: provider.symbol.downcase) } + let(:token) do + User.generate_token( + role_id: "provider_user", provider_id: provider.symbol.downcase, + ) + end it { is_expected.to be_able_to(:read, user) } diff --git a/spec/models/activity_spec.rb b/spec/models/activity_spec.rb index e702e436d..4674ea8b3 100644 --- a/spec/models/activity_spec.rb +++ b/spec/models/activity_spec.rb @@ -1,8 +1,10 @@ +# frozen_string_literal: true + require "rails_helper" describe Activity, type: :model do context "create doi" do - let(:client) { create(:client) } + let(:client) { create(:client) } let(:doi) { create(:doi, client: client) } it "activity exists" do @@ -12,12 +14,19 @@ # expect(activity.username).to eq(2) expect(activity.request_uuid).to be_present expect(activity.changes["aasm_state"]).to eq("draft") - expect(activity.changes["types"]).to eq("bibtex" => "misc", "citeproc" => "dataset", "resourceType" => "DataPackage", "resourceTypeGeneral" => "Dataset", "ris" => "DATA", "schemaOrg" => "Dataset") + expect(activity.changes["types"]).to eq( + "bibtex" => "misc", + "citeproc" => "dataset", + "resourceType" => "DataPackage", + "resourceTypeGeneral" => "Dataset", + "ris" => "DATA", + "schemaOrg" => "Dataset", + ) end end context "update doi" do - let(:client) { create(:client) } + let(:client) { create(:client) } let(:doi) { create(:doi, client: client) } it "activity exists" do @@ -28,7 +37,7 @@ expect(activity.auditable.uid).to eq(doi.uid) # expect(activity.username).to eq(2) expect(activity.request_uuid).to be_present - expect(activity.changes).to eq("aasm_state" => ["draft", "findable"]) + expect(activity.changes).to eq("aasm_state" => %w[draft findable]) 
end end @@ -57,12 +66,14 @@ expect(activity.auditable.uid).to eq(provider.uid) expect(activity.request_uuid).to be_present - expect(activity.changes).to eq("non_profit_status" => ["non-profit", "for-profit"]) + expect(activity.changes).to eq( + "non_profit_status" => %w[non-profit for-profit], + ) end end context "create client" do - let(:client) { create(:client) } + let(:client) { create(:client) } it "activity exists" do expect(client.activities.length).to eq(1) @@ -76,7 +87,7 @@ end context "update client" do - let(:client) { create(:client) } + let(:client) { create(:client) } it "activity exists" do client.update(client_type: "periodical") @@ -86,7 +97,7 @@ expect(activity.auditable.uid).to eq(client.uid) expect(activity.request_uuid).to be_present - expect(activity.changes).to eq("client_type" => ["repository", "periodical"]) + expect(activity.changes).to eq("client_type" => %w[repository periodical]) end end end diff --git a/spec/models/client_prefix_spec.rb b/spec/models/client_prefix_spec.rb index aed19e5a2..76157e25f 100644 --- a/spec/models/client_prefix_spec.rb +++ b/spec/models/client_prefix_spec.rb @@ -1,11 +1,20 @@ +# frozen_string_literal: true + require "rails_helper" describe ClientPrefix, type: :model do let(:provider) { create(:provider) } let(:client) { create(:client, provider: provider) } let(:prefix) { create(:prefix, uid: "10.5083") } - let(:provider_prefix) { create(:provider_prefix, prefix: prefix, provider: provider) } - subject { create(:client_prefix, client: client, prefix: prefix, provider_prefix: provider_prefix) } + let(:provider_prefix) do + create(:provider_prefix, prefix: prefix, provider: provider) + end + subject do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix: provider_prefix, + ) + end describe "Validations" do it { should validate_presence_of(:client) } diff --git a/spec/models/client_spec.rb b/spec/models/client_spec.rb index f3cc10b37..a00045d1b 100644 --- a/spec/models/client_spec.rb +++ 
b/spec/models/client_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Client, type: :model do @@ -17,7 +19,9 @@ params = client.to_jsonapi expect(params.dig("id")).to eq(client.symbol.downcase) expect(params.dig("attributes", "symbol")).to eq(client.symbol) - expect(params.dig("attributes", "system-email")).to eq(client.system_email) + expect(params.dig("attributes", "system-email")).to eq( + client.system_email, + ) expect(params.dig("attributes", "provider-id")).to eq(client.provider_id) expect(params.dig("attributes", "is-active")).to be true end @@ -28,11 +32,22 @@ let!(:prefix) { prefixes.first } ### Order is important in creating prefixes relations - let!(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } - let!(:provider_prefix_more) { create(:provider_prefix, provider: provider, prefix: prefixes.last) } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid) } + let!(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end + let!(:provider_prefix_more) do + create(:provider_prefix, provider: provider, prefix: prefixes.last) + end + let!(:client_prefix) do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid, + ) + end - let(:new_provider) { create(:provider, symbol: "QUECHUA", member_type: "direct_member") } + let(:new_provider) do + create(:provider, symbol: "QUECHUA", member_type: "direct_member") + end let(:provider_target_id) { new_provider.symbol } let(:bad_provider_target_id) { "SALS" } @@ -60,7 +75,9 @@ end context "to member_only" do - let(:new_provider) { create(:provider, symbol: "QUECHUA", member_type: "member_only") } + let(:new_provider) do + create(:provider, symbol: "QUECHUA", member_type: "member_only") + end let(:provider_target_id) { new_provider.symbol } it "it doesn't transfer" do @@ -73,7 +90,12 @@ end context "to 
consortium_organization" do - let(:new_provider) { create(:provider, symbol: "QUECHUA", member_type: "consortium_organization") } + let(:new_provider) do + create( + :provider, + symbol: "QUECHUA", member_type: "consortium_organization", + ) + end let(:provider_target_id) { new_provider.symbol } it "works" do @@ -89,7 +111,9 @@ end context "to consortium" do - let(:new_provider) { create(:provider, symbol: "QUECHUA", role_name: "ROLE_CONSORTIUM") } + let(:new_provider) do + create(:provider, symbol: "QUECHUA", role_name: "ROLE_CONSORTIUM") + end let(:provider_target_id) { new_provider.symbol } it "it doesn't transfer" do @@ -106,9 +130,18 @@ let!(:prefixes) { create_list(:prefix, 3) } let!(:prefix) { prefixes.first } ### Order is important in creating prefixes relations - let!(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } - let!(:provider_prefix_more) { create(:provider_prefix, provider: provider, prefix: prefixes.last) } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid) } + let!(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end + let!(:provider_prefix_more) do + create(:provider_prefix, provider: provider, prefix: prefixes.last) + end + let!(:client_prefix) do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid, + ) + end let(:new_provider) { create(:provider, symbol: "QUECHUA") } it "works" do @@ -131,7 +164,9 @@ end describe "issn" do - let(:client) { build(:client, provider: provider, client_type: "periodical") } + let(:client) do + build(:client, provider: provider, client_type: "periodical") + end it "should support issn" do client.issn = { "issnl" => "1544-9173" } @@ -148,27 +183,31 @@ it "should reject invalid issn" do client.issn = { "issnl" => "1544-91XX" } expect(client.save).to be false - expect(client.errors.details).to eq(issn: [{ error: "ISSN-L 1544-91XX 
is in the wrong format." }]) + expect(client.errors.details).to eq( + issn: [{ error: "ISSN-L 1544-91XX is in the wrong format." }], + ) end end describe "certificate" do - let(:client) { build(:client, provider: provider, client_type: "repository") } + let(:client) do + build(:client, provider: provider, client_type: "repository") + end it "should support certificate" do - client.certificate = ["CoreTrustSeal"] + client.certificate = %w[CoreTrustSeal] expect(client.save).to be true expect(client.errors.details).to be_empty end it "should support certificate" do - client.certificate = ["CLARIN"] + client.certificate = %w[CLARIN] expect(client.save).to be true expect(client.errors.details).to be_empty end it "should support multiple certificates" do - client.certificate = ["WDS", "DSA"] + client.certificate = %w[WDS DSA] expect(client.save).to be true expect(client.errors.details).to be_empty end @@ -176,7 +215,14 @@ it "should reject unknown certificate" do client.certificate = ["MyHomeGrown Certificate"] expect(client.save).to be false - expect(client.errors.details).to eq(certificate: [{ error: "Certificate MyHomeGrown Certificate is not included in the list of supported certificates." 
}]) + expect(client.errors.details).to eq( + certificate: [ + { + error: + "Certificate MyHomeGrown Certificate is not included in the list of supported certificates.", + }, + ], + ) end end @@ -208,7 +254,9 @@ it "invalid" do subject.salesforce_id = "abc" expect(subject.save).to be false - expect(subject.errors.details).to eq(salesforce_id: [{ error: :invalid, value: "abc" }]) + expect(subject.errors.details).to eq( + salesforce_id: [{ error: :invalid, value: "abc" }], + ) end it "blank" do @@ -236,29 +284,40 @@ it "unsupported" do client.client_type = "conference" expect(client.save).to be false - expect(client.errors.details).to eq(client_type: [{ error: :inclusion, value: "conference" }]) + expect(client.errors.details).to eq( + client_type: [{ error: :inclusion, value: "conference" }], + ) end end describe "repository_type" do - let(:client) { build(:client, provider: provider, client_type: "repository") } + let(:client) do + build(:client, provider: provider, client_type: "repository") + end it "should support repository_type" do - client.repository_type = ["institutional"] + client.repository_type = %w[institutional] expect(client.save).to be true expect(client.errors.details).to be_empty end it "should support multiple repository_types" do - client.repository_type = ["institutional", "governmental"] + client.repository_type = %w[institutional governmental] expect(client.save).to be true expect(client.errors.details).to be_empty end it "should reject unknown repository_type" do - client.repository_type = ["interplanetary"] + client.repository_type = %w[interplanetary] expect(client.save).to be false - expect(client.errors.details).to eq(repository_type: [{ error: "Repository type interplanetary is not included in the list of supported repository types." 
}]) + expect(client.errors.details).to eq( + repository_type: [ + { + error: + "Repository type interplanetary is not included in the list of supported repository types.", + }, + ], + ) end end @@ -280,24 +339,28 @@ it "should reject string that is not a UUID" do client.globus_uuid = "abc" expect(client.save).to be false - expect(client.errors.details).to eq(globus_uuid: [{ error: "abc is not a valid UUID" }]) + expect(client.errors.details).to eq( + globus_uuid: [{ error: "abc is not a valid UUID" }], + ) end end describe "cumulative_years" do before(:each) do - allow(Time).to receive(:now).and_return(Time.mktime(2015, 4, 8)) - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) end it "should show all cumulative years" do client = create(:client, provider: provider) - expect(client.cumulative_years).to eq([2015, 2016, 2017, 2018, 2019, 2020]) + expect(client.cumulative_years).to eq( + [2_015, 2_016, 2_017, 2_018, 2_019, 2_020], + ) end it "should show years before deleted" do client = create(:client, provider: provider, deleted_at: "2018-06-14") - expect(client.cumulative_years).to eq([2015, 2016, 2017]) + expect(client.cumulative_years).to eq([2_015, 2_016, 2_017]) end it "empty if deleted in creation year" do diff --git a/spec/models/data_catalog_spec.rb b/spec/models/data_catalog_spec.rb index 28173d3bb..d12606956 100644 --- a/spec/models/data_catalog_spec.rb +++ b/spec/models/data_catalog_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataCatalog, type: :model, vcr: true do @@ -26,13 +28,15 @@ it "all" do query = nil data_catalogs = DataCatalog.query(query) - expect(data_catalogs.dig(:meta, "total")).to eq(1723) + expect(data_catalogs.dig(:meta, "total")).to eq(1_723) expect(data_catalogs[:data].size).to eq(25) data_catalog = data_catalogs[:data].first 
expect(data_catalog.id).to eq("https://doi.org/10.17616/r3w05r") expect(data_catalog.re3data_id).to eq("r3d100011565") expect(data_catalog.name).to eq("1000 Functional Connectomes Project") - expect(data_catalog.url).to eq("http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html") + expect(data_catalog.url).to eq( + "http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html", + ) expect(data_catalog.certificates).to eq([]) end @@ -58,7 +62,9 @@ expect(data_catalog.id).to eq("https://doi.org/10.17616/r3p32s") expect(data_catalog.re3data_id).to eq("r3d100010621") expect(data_catalog.name).to eq("CDC - Climate Data Center") - expect(data_catalog.url).to eq("https://cdc.dwd.de/catalogue/srv/en/main.home") + expect(data_catalog.url).to eq( + "https://cdc.dwd.de/catalogue/srv/en/main.home", + ) expect(data_catalog.certificates).to eq([{ "text" => "other" }]) end @@ -88,14 +94,18 @@ it "open" do data_catalogs = DataCatalog.query(nil, open: true) - expect(data_catalogs.dig(:meta, "total")).to eq(1516) + expect(data_catalogs.dig(:meta, "total")).to eq(1_516) expect(data_catalogs[:data].size).to eq(25) data_catalog = data_catalogs[:data].first expect(data_catalog.id).to eq("https://doi.org/10.17616/r3w05r") expect(data_catalog.re3data_id).to eq("r3d100011565") expect(data_catalog.name).to eq("1000 Functional Connectomes Project") - expect(data_catalog.url).to eq("http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html") - expect(data_catalog.data_accesses).to eq([{ "restrictions" => [], "type" => "open" }]) + expect(data_catalog.url).to eq( + "http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html", + ) + expect(data_catalog.data_accesses).to eq( + [{ "restrictions" => [], "type" => "open" }], + ) end it "not found" do diff --git a/spec/models/datacite_doi_spec.rb b/spec/models/datacite_doi_spec.rb index 2b1967135..83c6de1a5 100644 --- a/spec/models/datacite_doi_spec.rb +++ b/spec/models/datacite_doi_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + 
require "rails_helper" describe DataciteDoi, type: :model, vcr: true do @@ -6,8 +8,21 @@ describe "import_by_ids", elasticsearch: true do let(:provider) { create(:provider) } let(:client) { create(:client, provider: provider) } - let(:target) { create(:client, provider: provider, symbol: provider.symbol + ".TARGET", name: "Target Client") } - let!(:dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let(:target) do + create( + :client, + provider: provider, + symbol: provider.symbol + ".TARGET", + name: "Target Client", + ) + end + let!(:dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end let(:doi) { dois.first } it "import by ids" do diff --git a/spec/models/doi_spec.rb b/spec/models/doi_spec.rb index d380b2d76..456590776 100644 --- a/spec/models/doi_spec.rb +++ b/spec/models/doi_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Doi, type: :model, vcr: true do diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb index c4c48b223..deb747a7d 100644 --- a/spec/models/event_spec.rb +++ b/spec/models/event_spec.rb @@ -1,7 +1,11 @@ +# frozen_string_literal: true + require "rails_helper" describe Event, type: :model, vcr: true do - before(:each) { allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) } + before(:each) do + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) + end context "event" do subject { create(:event) } @@ -21,8 +25,22 @@ doi = Event.import_doi(id) expect(doi.doi).to eq("10.1371/JOURNAL.PBIO.2001414") expect(doi.agency).to eq("crossref") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => "article-journal", "resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Identifiers for the 21st century: How to design, provision, and reuse persistent 
identifiers to maximize utility and impact of life science data" }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => + "Identifiers for the 21st century: How to design, provision, and reuse persistent identifiers to maximize utility and impact of life science data", + }, + ], + ) expect(doi.minted.to_s).to start_with("2017-06-29") expect(doi.schema_version).to eq("http://datacite.org/schema/kernel-4") expect(doi.datacentre).to eq(0) @@ -33,8 +51,22 @@ doi = Event.import_doi(doi.doi, refresh: true) expect(doi.doi).to eq("10.1371/JOURNAL.PBIO.2001414") expect(doi.agency).to eq("crossref") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => "article-journal", "resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Identifiers for the 21st century: How to design, provision, and reuse persistent identifiers to maximize utility and impact of life science data" }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => + "Identifiers for the 21st century: How to design, provision, and reuse persistent identifiers to maximize utility and impact of life science data", + }, + ], + ) expect(doi.minted.to_s).to start_with("2017-06-29") expect(doi.schema_version).to eq("http://datacite.org/schema/kernel-4") expect(doi.datacentre).to eq(0) @@ -45,8 +77,22 @@ doi = Event.import_doi(id) expect(doi.doi).to eq("10.3280/ECAG2018-001005") expect(doi.agency).to eq("medra") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => 
"article-journal", "resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Substitutability between organic and conventional poultry products and organic price premiums" }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => + "Substitutability between organic and conventional poultry products and organic price premiums", + }, + ], + ) expect(doi.minted.to_s).to start_with("2018-07-12") expect(doi.datacentre).to eq(0) end @@ -56,8 +102,22 @@ doi = Event.import_doi(id) expect(doi.doi).to eq("10.5012/BKCS.2013.34.10.2889") expect(doi.agency).to eq("kisti") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => "article-journal", "resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Synthesis, Crystal Structure and Theoretical Calculation of a Novel Nickel(II) Complex with Dibromotyrosine and 1,10-Phenanthroline" }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => + "Synthesis, Crystal Structure and Theoretical Calculation of a Novel Nickel(II) Complex with Dibromotyrosine and 1,10-Phenanthroline", + }, + ], + ) expect(doi.minted.to_s).to start_with("2013-11-25") expect(doi.datacentre).to eq(0) end @@ -67,8 +127,22 @@ doi = Event.import_doi(id) expect(doi.doi).to eq("10.1241/JOHOKANRI.39.979") expect(doi.agency).to eq("jalc") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => "article-journal", 
"resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Utilizing the Internet. 12 Series. Future of the Internet." }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => + "Utilizing the Internet. 12 Series. Future of the Internet.", + }, + ], + ) expect(doi.minted.to_s).to start_with("2002-08-08") expect(doi.datacentre).to eq(0) end @@ -78,8 +152,21 @@ doi = Event.import_doi(id) expect(doi.doi).to eq("10.2903/J.EFSA.2018.5239") expect(doi.agency).to eq("op") - expect(doi.types).to eq("bibtex" => "article", "citeproc" => "article-journal", "resourceType" => "JournalArticle", "resourceTypeGeneral" => "Text", "ris" => "JOUR", "schemaOrg" => "ScholarlyArticle") - expect(doi.titles).to eq([{ "title" => "Scientific opinion on the safety of green tea catechins" }]) + expect(doi.types).to eq( + "bibtex" => "article", + "citeproc" => "article-journal", + "resourceType" => "JournalArticle", + "resourceTypeGeneral" => "Text", + "ris" => "JOUR", + "schemaOrg" => "ScholarlyArticle", + ) + expect(doi.titles).to eq( + [ + { + "title" => "Scientific opinion on the safety of green tea catechins", + }, + ], + ) expect(doi.minted.to_s).to start_with("2018-12-17") expect(doi.datacentre).to eq(0) end @@ -104,14 +191,21 @@ end context "citation" do - subject { create(:event_for_datacite_related, subj_id: "https://doi.org/10.5061/dryad.47sd5e/2") } + subject do + create( + :event_for_datacite_related, + subj_id: "https://doi.org/10.5061/dryad.47sd5e/2", + ) + end it "has citation_id" do - expect(subject.citation_id).to eq("https://doi.org/10.5061/dryad.47sd5/1-https://doi.org/10.5061/dryad.47sd5e/2") + expect(subject.citation_id).to eq( + 
"https://doi.org/10.5061/dryad.47sd5/1-https://doi.org/10.5061/dryad.47sd5e/2", + ) end it "has citation_year" do - expect(subject.citation_year).to eq(2015) + expect(subject.citation_year).to eq(2_015) end let(:doi) { create(:doi) } @@ -119,13 +213,15 @@ it "date_published from the database" do published = subject.date_published("https://doi.org/" + doi.doi) expect(published).to eq("2011") - expect(published).not_to eq(2011) + expect(published).not_to eq(2_011) end it "label_state_event with not existent prefix" do expect(Event.find_by(uuid: subject.uuid).state_event).to be_nil Event.label_state_event(uuid: subject.uuid, subj_id: subject.subj_id) - expect(Event.find_by(uuid: subject.uuid).state_event).to eq("crossref_citations_error") + expect(Event.find_by(uuid: subject.uuid).state_event).to eq( + "crossref_citations_error", + ) end context "prefix exists, then dont to change" do @@ -164,11 +260,13 @@ subject { create(:event_for_crossref) } it "creates event" do - expect(subject.subj_id).to eq("https://doi.org/10.1371/journal.pbio.2001414") + expect(subject.subj_id).to eq( + "https://doi.org/10.1371/journal.pbio.2001414", + ) expect(subject.obj_id).to eq("https://doi.org/10.5061/dryad.47sd5e/1") expect(subject.relation_type_id).to eq("references") expect(subject.source_id).to eq("crossref") - expect(subject.dois_to_import).to eq(["10.1371/journal.pbio.2001414"]) + expect(subject.dois_to_import).to eq(%w[10.1371/journal.pbio.2001414]) end end @@ -176,11 +274,13 @@ subject { create(:event_for_crossref_import) } it "creates event" do - expect(subject.subj_id).to eq("https://doi.org/10.1371/journal.pbio.2001414") + expect(subject.subj_id).to eq( + "https://doi.org/10.1371/journal.pbio.2001414", + ) expect(subject.obj_id).to be_nil expect(subject.relation_type_id).to eq("references") expect(subject.source_id).to eq("crossref_import") - expect(subject.dois_to_import).to eq(["10.1371/journal.pbio.2001414"]) + expect(subject.dois_to_import).to 
eq(%w[10.1371/journal.pbio.2001414]) end end @@ -225,7 +325,11 @@ it "should transform keys" do Event.camelcase_nested_objects(subject.uuid) - expect(subject.subj.keys).to include("datePublished", "registrantId", "id") + expect(subject.subj.keys).to include( + "datePublished", + "registrantId", + "id", + ) end end end diff --git a/spec/models/funder_spec.rb b/spec/models/funder_spec.rb index 7b2db2d44..515713ddf 100644 --- a/spec/models/funder_spec.rb +++ b/spec/models/funder_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Funder, type: :model, vcr: true do @@ -9,7 +11,13 @@ funder = funder[:data].first expect(funder.id).to eq("https://doi.org/10.13039/501100006568") expect(funder.name).to eq("Toagosei") - expect(funder.alternate_name).to eq(["Toagosei Co., Ltd.", "Toagosei Chemical Industry Company Limited", "Toagosei Company Limited"]) + expect(funder.alternate_name).to eq( + [ + "Toagosei Co., Ltd.", + "Toagosei Chemical Industry Company Limited", + "Toagosei Company Limited", + ], + ) expect(funder.country).to eq("code" => "JP", "name" => "Japan") end @@ -17,14 +25,18 @@ id = "https://doi.org/10.13039/xxxxx" funder = Funder.find_by(id: id) expect(funder[:data]).to be_nil - expect(funder[:errors]).to eq([{ "status" => 404, "title" => "Not found." }]) + expect(funder[:errors]).to eq( + [{ "status" => 404, "title" => "Not found." }], + ) end it "not a doi" do id = "xxxxx" funder = Funder.find_by(id: id) expect(funder[:data]).to be_nil - expect(funder[:errors]).to eq([{ "status" => 422, "title" => "Not a valid DOI." }]) + expect(funder[:errors]).to eq( + [{ "status" => 422, "title" => "Not a valid DOI." 
}], + ) end end @@ -32,7 +44,7 @@ it "found all" do query = nil funders = Funder.query(query) - expect(funders.dig(:meta, "total")).to eq(24413) + expect(funders.dig(:meta, "total")).to eq(24_413) expect(funders.dig(:data).size).to eq(25) funder = funders[:data].first expect(funder.id).to eq("https://doi.org/10.13039/501100010742") @@ -44,7 +56,7 @@ it "found all paginate" do query = nil funders = Funder.query(query, offset: 2, limit: 3) - expect(funders.dig(:meta, "total")).to eq(24413) + expect(funders.dig(:meta, "total")).to eq(24_413) expect(funders.dig(:data).size).to eq(3) funder = funders[:data].first expect(funder.id).to eq("https://doi.org/10.13039/501100004246") @@ -61,7 +73,15 @@ funder = funders[:data].first expect(funder.id).to eq("https://doi.org/10.13039/100004875") expect(funder.name).to eq("Massachusetts Department of Fish and Game") - expect(funder.alternate_name).to eq(["Massachusetts Fish & Game Department", "Department of Fish and Game", "DFG", "The Department of Fish and Game", "MassDFG"]) + expect(funder.alternate_name).to eq( + [ + "Massachusetts Fish & Game Department", + "Department of Fish and Game", + "DFG", + "The Department of Fish and Game", + "MassDFG", + ], + ) expect(funder.country).to eq("code" => "US", "name" => "United States") end diff --git a/spec/models/handler_spec.rb b/spec/models/handler_spec.rb index cca727eca..33074bd9a 100644 --- a/spec/models/handler_spec.rb +++ b/spec/models/handler_spec.rb @@ -1,3 +1,4 @@ +# frozen_string_literal: true # require 'rails_helper' # require 'handler' # diff --git a/spec/models/media_spec.rb b/spec/models/media_spec.rb index 2c57ec398..d2c50ae7a 100644 --- a/spec/models/media_spec.rb +++ b/spec/models/media_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Media, type: :model do diff --git a/spec/models/metadata_spec.rb b/spec/models/metadata_spec.rb index 43bd21c64..166146b97 100644 --- a/spec/models/metadata_spec.rb +++ 
b/spec/models/metadata_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Metadata, type: :model, vcr: true do diff --git a/spec/models/organization_spec.rb b/spec/models/organization_spec.rb index ade66a628..46338bf24 100644 --- a/spec/models/organization_spec.rb +++ b/spec/models/organization_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Organization, type: :model, vcr: true do @@ -21,22 +23,30 @@ describe "crossref_funder_id_from_url" do it "full url" do crossref_funder_id = "https://doi.org/10.13039/501100000780" - expect(Organization.crossref_funder_id_from_url(crossref_funder_id)).to eq("10.13039/501100000780") + expect( + Organization.crossref_funder_id_from_url(crossref_funder_id), + ).to eq("10.13039/501100000780") end it "without https" do crossref_funder_id = "doi.org/10.13039/501100000780" - expect(Organization.crossref_funder_id_from_url(crossref_funder_id)).to eq("10.13039/501100000780") + expect( + Organization.crossref_funder_id_from_url(crossref_funder_id), + ).to eq("10.13039/501100000780") end it "without full path" do crossref_funder_id = "10.13039/501100000780" - expect(Organization.crossref_funder_id_from_url(crossref_funder_id)).to eq("10.13039/501100000780") + expect( + Organization.crossref_funder_id_from_url(crossref_funder_id), + ).to eq("10.13039/501100000780") end it "without full path" do crossref_funder_id = "10.1038/501100000780" - expect(Organization.crossref_funder_id_from_url(crossref_funder_id)).to be_nil + expect( + Organization.crossref_funder_id_from_url(crossref_funder_id), + ).to be_nil end end @@ -70,11 +80,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/0521rfb23") expect(organization.name).to eq("Lincoln University - Pennsylvania") - expect(organization.labels).to eq([{ "code" => "ES", "name" => "Universidad Lincoln" }]) - expect(organization.links).to eq(["http://www.lincoln.edu/"]) + 
expect(organization.labels).to eq( + [{ "code" => "ES", "name" => "Universidad Lincoln" }], + ) + expect(organization.links).to eq(%w[http://www.lincoln.edu/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1854") - expect(organization.geolocation).to eq("latitude" => 39.808333333333, "longitude" => -75.927777777778) + expect(organization.geolocation).to eq( + "latitude" => 39.808333333333, "longitude" => -75.927777777778, + ) expect(organization.ringgold).to eq("4558") end @@ -85,27 +99,42 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/013meh722") expect(organization.name).to eq("University of Cambridge") - expect(organization.labels).to eq([{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }]) - expect(organization.links).to eq(["http://www.cam.ac.uk/"]) + expect(organization.labels).to eq( + [{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }], + ) + expect(organization.links).to eq(%w[http://www.cam.ac.uk/]) expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end it "found datacite member" do - member = create(:provider, role_name: "ROLE_CONSORTIUM_ORGANIZATION", name: "University of Cambridge", symbol: "LPSW", ror_id: "https://ror.org/013meh722") + member = + create( + :provider, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + name: "University of Cambridge", + symbol: "LPSW", + ror_id: "https://ror.org/013meh722", + ) id = "https://ror.org/013meh722" organizations = Organization.find_by(id: id) expect(organizations[:data].size).to eq(1) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/013meh722") expect(organization.name).to 
eq("University of Cambridge") - expect(organization.labels).to eq([{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }]) - expect(organization.links).to eq(["http://www.cam.ac.uk/"]) + expect(organization.labels).to eq( + [{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }], + ) + expect(organization.links).to eq(%w[http://www.cam.ac.uk/]) expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end @@ -116,11 +145,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/018mejw64") expect(organization.name).to eq("Deutsche Forschungsgemeinschaft") - expect(organization.labels).to eq([{ "code" => "EN", "name" => "German Research Foundation" }]) - expect(organization.links).to eq(["http://www.dfg.de/en/"]) + expect(organization.labels).to eq( + [{ "code" => "EN", "name" => "German Research Foundation" }], + ) + expect(organization.links).to eq(%w[http://www.dfg.de/en/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1951") - expect(organization.geolocation).to eq("latitude" => 50.699443, "longitude" => 7.14777) + expect(organization.geolocation).to eq( + "latitude" => 50.699443, "longitude" => 7.14777, + ) expect(organization.ringgold).to eq("39045") end @@ -132,7 +165,7 @@ expect(organization.id).to eq("https://ror.org/02q0ygf45") expect(organization.name).to eq("OBS Medical (United Kingdom)") expect(organization.labels).to eq([]) - expect(organization.links).to eq(["http://www.obsmedical.com/"]) + expect(organization.links).to eq(%w[http://www.obsmedical.com/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to be_nil expect(organization.geolocation).to be_empty 
@@ -155,11 +188,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/0521rfb23") expect(organization.name).to eq("Lincoln University - Pennsylvania") - expect(organization.labels).to eq([{ "code" => "ES", "name" => "Universidad Lincoln" }]) - expect(organization.links).to eq(["http://www.lincoln.edu/"]) + expect(organization.labels).to eq( + [{ "code" => "ES", "name" => "Universidad Lincoln" }], + ) + expect(organization.links).to eq(%w[http://www.lincoln.edu/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1854") - expect(organization.geolocation).to eq("latitude" => 39.808333333333, "longitude" => -75.927777777778) + expect(organization.geolocation).to eq( + "latitude" => 39.808333333333, "longitude" => -75.927777777778, + ) expect(organization.ringgold).to eq("4558") end @@ -170,11 +207,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/013meh722") expect(organization.name).to eq("University of Cambridge") - expect(organization.labels).to eq([{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }]) - expect(organization.links).to eq(["http://www.cam.ac.uk/"]) + expect(organization.labels).to eq( + [{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }], + ) + expect(organization.links).to eq(%w[http://www.cam.ac.uk/]) expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end @@ -185,11 +226,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/018mejw64") expect(organization.name).to eq("Deutsche Forschungsgemeinschaft") - expect(organization.labels).to eq([{ "code" => "EN", "name" => "German Research 
Foundation" }]) - expect(organization.links).to eq(["http://www.dfg.de/en/"]) + expect(organization.labels).to eq( + [{ "code" => "EN", "name" => "German Research Foundation" }], + ) + expect(organization.links).to eq(%w[http://www.dfg.de/en/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1951") - expect(organization.geolocation).to eq("latitude" => 50.699443, "longitude" => 7.14777) + expect(organization.geolocation).to eq( + "latitude" => 50.699443, "longitude" => 7.14777, + ) expect(organization.ringgold).to eq("39045") end @@ -201,7 +246,7 @@ expect(organization.id).to eq("https://ror.org/02q0ygf45") expect(organization.name).to eq("OBS Medical (United Kingdom)") expect(organization.labels).to eq([]) - expect(organization.links).to eq(["http://www.obsmedical.com/"]) + expect(organization.links).to eq(%w[http://www.obsmedical.com/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to be_nil expect(organization.geolocation).to be_empty @@ -224,11 +269,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/0521rfb23") expect(organization.name).to eq("Lincoln University - Pennsylvania") - expect(organization.labels).to eq([{ "code" => "ES", "name" => "Universidad Lincoln" }]) - expect(organization.links).to eq(["http://www.lincoln.edu/"]) + expect(organization.labels).to eq( + [{ "code" => "ES", "name" => "Universidad Lincoln" }], + ) + expect(organization.links).to eq(%w[http://www.lincoln.edu/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1854") - expect(organization.geolocation).to eq("latitude" => 39.808333333333, "longitude" => -75.927777777778) + expect(organization.geolocation).to eq( + "latitude" => 39.808333333333, "longitude" => -75.927777777778, + ) expect(organization.ringgold).to eq("4558") end @@ -239,11 +288,15 @@ organization = organizations[:data].first expect(organization.id).to 
eq("https://ror.org/013meh722") expect(organization.name).to eq("University of Cambridge") - expect(organization.labels).to eq([{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }]) - expect(organization.links).to eq(["http://www.cam.ac.uk/"]) + expect(organization.labels).to eq( + [{ "code" => "CY", "name" => "Prifysgol Caergrawnt" }], + ) + expect(organization.links).to eq(%w[http://www.cam.ac.uk/]) expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end @@ -254,11 +307,15 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/018mejw64") expect(organization.name).to eq("Deutsche Forschungsgemeinschaft") - expect(organization.labels).to eq([{ "code" => "EN", "name" => "German Research Foundation" }]) - expect(organization.links).to eq(["http://www.dfg.de/en/"]) + expect(organization.labels).to eq( + [{ "code" => "EN", "name" => "German Research Foundation" }], + ) + expect(organization.links).to eq(%w[http://www.dfg.de/en/]) expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1951") - expect(organization.geolocation).to eq("latitude" => 50.699443, "longitude" => 7.14777) + expect(organization.geolocation).to eq( + "latitude" => 50.699443, "longitude" => 7.14777, + ) expect(organization.ringgold).to eq("39045") end @@ -280,7 +337,9 @@ expect(organization.name).to eq("University of Cambridge") expect(organization.twitter).to eq("Cambridge_Uni") expect(organization.inception_year).to eq("1209") - expect(organization.geolocation).to eq("latitude" => 52.205277777778, "longitude" => 0.11722222222222) + expect(organization.geolocation).to eq( + "latitude" => 52.205277777778, "longitude" => 
0.11722222222222, + ) expect(organization.ringgold).to eq("2152") end @@ -293,7 +352,9 @@ expect(organization.name).to eq("German Research Foundation") expect(organization.twitter).to be_nil expect(organization.inception_year).to eq("1951") - expect(organization.geolocation).to eq("latitude" => 50.699443, "longitude" => 7.14777) + expect(organization.geolocation).to eq( + "latitude" => 50.699443, "longitude" => 7.14777, + ) expect(organization.ringgold).to eq("39045") end end @@ -302,25 +363,27 @@ it "all" do query = nil organizations = Organization.query(query) - expect(organizations.dig(:meta, "total")).to eq(98332) + expect(organizations.dig(:meta, "total")).to eq(98_332) expect(organizations[:data].size).to eq(20) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/027bk5v43") expect(organization.name).to eq("Illinois Department of Public Health") expect(organization.labels).to eq([]) - expect(organization.links).to eq(["http://www.dph.illinois.gov/"]) + expect(organization.links).to eq(%w[http://www.dph.illinois.gov/]) end it "found" do query = "lincoln university" organizations = Organization.query(query) - expect(organizations.dig(:meta, "total")).to eq(10764) + expect(organizations.dig(:meta, "total")).to eq(10_764) expect(organizations[:data].size).to eq(20) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/04ps1r162") expect(organization.name).to eq("Lincoln University") - expect(organization.labels).to eq([{ "code" => "MI", "name" => "Te Whare Wanaka o Aoraki" }]) - expect(organization.links).to eq(["http://www.lincoln.ac.nz/"]) + expect(organization.labels).to eq( + [{ "code" => "MI", "name" => "Te Whare Wanaka o Aoraki" }], + ) + expect(organization.links).to eq(%w[http://www.lincoln.ac.nz/]) end it "found with umlaut" do @@ -331,56 +394,65 @@ organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/01856cw59") expect(organization.name).to 
eq("University Hospital Münster") - expect(organization.labels).to eq([{ "code" => "DE", "name" => "Universitätsklinikum Münster" }]) - expect(organization.links).to eq(["http://klinikum.uni-muenster.de/"]) + expect(organization.labels).to eq( + [{ "code" => "DE", "name" => "Universitätsklinikum Münster" }], + ) + expect(organization.links).to eq(%w[http://klinikum.uni-muenster.de/]) end it "found page 2" do query = "lincoln university" organizations = Organization.query(query, offset: 2) - expect(organizations.dig(:meta, "total")).to eq(10764) + expect(organizations.dig(:meta, "total")).to eq(10_764) expect(organizations[:data].size).to eq(20) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/01qb09m39") expect(organization.name).to eq("Lincoln Agritech (New Zealand)") expect(organization.labels).to eq([]) - expect(organization.links).to eq(["https://www.lincolnagritech.co.nz/"]) + expect(organization.links).to eq(%w[https://www.lincolnagritech.co.nz/]) end it "found by types government" do organizations = Organization.query(nil, types: "government") - expect(organizations.dig(:meta, "total")).to eq(5762) + expect(organizations.dig(:meta, "total")).to eq(5_762) expect(organizations[:data].size).to eq(20) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/027bk5v43") expect(organization.name).to eq("Illinois Department of Public Health") - expect(organization.types).to eq(["Government"]) + expect(organization.types).to eq(%w[Government]) expect(organization.labels).to eq([]) - expect(organization.links).to eq(["http://www.dph.illinois.gov/"]) + expect(organization.links).to eq(%w[http://www.dph.illinois.gov/]) end it "found by country gb" do organizations = Organization.query(nil, country: "gb") - expect(organizations.dig(:meta, "total")).to eq(7166) + expect(organizations.dig(:meta, "total")).to eq(7_166) expect(organizations[:data].size).to eq(20) organization = 
organizations[:data].first expect(organization.id).to eq("https://ror.org/04jzmdh37") expect(organization.name).to eq("Centre for Economic Policy Research") - expect(organization.types).to eq(["Nonprofit"]) + expect(organization.types).to eq(%w[Nonprofit]) expect(organization.labels).to eq([]) - expect(organization.links).to eq(["http://www.cepr.org/"]) + expect(organization.links).to eq(%w[http://www.cepr.org/]) end it "found by types and country" do - organizations = Organization.query(nil, types: "government", country: "gb") + organizations = + Organization.query(nil, types: "government", country: "gb") expect(organizations.dig(:meta, "total")).to eq(314) expect(organizations[:data].size).to eq(20) organization = organizations[:data].first expect(organization.id).to eq("https://ror.org/04jswqb94") - expect(organization.name).to eq("Defence Science and Technology Laboratory") - expect(organization.types).to eq(["Government"]) + expect(organization.name).to eq( + "Defence Science and Technology Laboratory", + ) + expect(organization.types).to eq(%w[Government]) expect(organization.labels).to eq([]) - expect(organization.links).to eq(["https://www.gov.uk/government/organisations/defence-science-and-technology-laboratory"]) + expect(organization.links).to eq( + %w[ + https://www.gov.uk/government/organisations/defence-science-and-technology-laboratory + ], + ) end it "not found" do @@ -390,7 +462,8 @@ end it "status code not 200" do - url = "https://api.ror.org/organizations?query=lincoln%20university&page=1" + url = + "https://api.ror.org/organizations?query=lincoln%20university&page=1" stub = stub_request(:get, url).and_return(status: [408]) query = "lincoln university" diff --git a/spec/models/other_doi_spec.rb b/spec/models/other_doi_spec.rb index bb0441a9e..fb1ac2a4f 100644 --- a/spec/models/other_doi_spec.rb +++ b/spec/models/other_doi_spec.rb @@ -1,10 +1,14 @@ +# frozen_string_literal: true + require "rails_helper" describe OtherDoi, type: :model, vcr: true 
do it_behaves_like "an STI class" describe "import_by_ids", elasticsearch: true do - let!(:dois) { create_list(:doi, 3, aasm_state: "findable", type: "OtherDoi") } + let!(:dois) do + create_list(:doi, 3, aasm_state: "findable", type: "OtherDoi") + end let(:doi) { dois.first } it "import by ids" do diff --git a/spec/models/person_spec.rb b/spec/models/person_spec.rb index fd3a4a999..6c52fa2e3 100644 --- a/spec/models/person_spec.rb +++ b/spec/models/person_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe Person, type: :model, vcr: true do @@ -30,15 +32,35 @@ expect(person.family_name).to eq("Garza") expect(person.alternate_name).to eq(["Kristian Javier Garza Gutierrez"]) expect(person.description).to be_nil - expect(person.links).to eq([{ "name" => "Mendeley profile", "url" => "https://www.mendeley.com/profiles/kristian-g/" }, { "name" => "github", "url" => "https://github.com/kjgarza" }]) - expect(person.identifiers).to eq([{ "identifier" => "kjgarza", - "identifierType" => "GitHub", - "identifierUrl" => "https://github.com/kjgarza" }]) + expect(person.links).to eq( + [ + { + "name" => "Mendeley profile", + "url" => "https://www.mendeley.com/profiles/kristian-g/", + }, + { "name" => "github", "url" => "https://github.com/kjgarza" }, + ], + ) + expect(person.identifiers).to eq( + [ + { + "identifier" => "kjgarza", + "identifierType" => "GitHub", + "identifierUrl" => "https://github.com/kjgarza", + }, + ], + ) expect(person.country).to eq("id" => "DE", "name" => "Germany") expect(person.employment.length).to eq(1) - expect(person.employment).to eq([{ "organization_name" => "DataCite", - "role_title" => "Application Developer", - "start_date" => "2016-08-01T00:00:00Z" }]) + expect(person.employment).to eq( + [ + { + "organization_name" => "DataCite", + "role_title" => "Application Developer", + "start_date" => "2016-08-01T00:00:00Z", + }, + ], + ) end it "found with biography" do @@ -51,36 +73,63 @@ expect(person.given_name).to 
eq("Martin") expect(person.family_name).to eq("Fenner") expect(person.alternate_name).to eq(["Martin Hellmut Fenner"]) - expect(person.description).to eq("Martin Fenner is the DataCite Technical Director since 2015. From 2012 to 2015 he was the technical lead for the PLOS Article-Level Metrics project. Martin has a medical degree from the Free University of Berlin and is a Board-certified medical oncologist.") - expect(person.links).to eq([{ "name" => "Twitter", "url" => "http://twitter.com/mfenner" }]) - expect(person.identifiers).to eq([{ "identifier" => "7006600825", - "identifierType" => "Scopus Author ID", - "identifierUrl" => - "http://www.scopus.com/inward/authorDetails.url?authorID=7006600825&partnerID=MN8TOARS" }, - { "identifier" => "000000035060549X", - "identifierType" => "ISNI", - "identifierUrl" => "http://isni.org/000000035060549X" }, - { "identifier" => "mfenner", - "identifierType" => "GitHub", - "identifierUrl" => "https://github.com/mfenner" }]) + expect(person.description).to eq( + "Martin Fenner is the DataCite Technical Director since 2015. From 2012 to 2015 he was the technical lead for the PLOS Article-Level Metrics project. 
Martin has a medical degree from the Free University of Berlin and is a Board-certified medical oncologist.", + ) + expect(person.links).to eq( + [{ "name" => "Twitter", "url" => "http://twitter.com/mfenner" }], + ) + expect(person.identifiers).to eq( + [ + { + "identifier" => "7006600825", + "identifierType" => "Scopus Author ID", + "identifierUrl" => + "http://www.scopus.com/inward/authorDetails.url?authorID=7006600825&partnerID=MN8TOARS", + }, + { + "identifier" => "000000035060549X", + "identifierType" => "ISNI", + "identifierUrl" => "http://isni.org/000000035060549X", + }, + { + "identifier" => "mfenner", + "identifierType" => "GitHub", + "identifierUrl" => "https://github.com/mfenner", + }, + ], + ) expect(person.country).to eq("id" => "DE", "name" => "Germany") - expect(person.employment).to eq([{ "organization_id" => "https://grid.ac/institutes/grid.475826.a", - "organization_name" => "DataCite", - "role_title" => "Technical Director", - "start_date" => "2015-08-01T00:00:00Z" }, - { "end_date" => "2017-05-01T00:00:00Z", - "organization_id" => "https://grid.ac/institutes/grid.10423.34", - "organization_name" => "Hannover Medical School", - "role_title" => "Clinical Fellow in Hematology and Oncology", - "start_date" => "2005-11-01T00:00:00Z" }, - { "end_date" => "2015-07-01T00:00:00Z", - "organization_name" => "Public Library of Science", - "role_title" => "Technical lead article-level metrics project (contractor)", - "start_date" => "2012-04-01T00:00:00Z" }, - { "end_date" => "2005-10-01T00:00:00Z", - "organization_name" => "Charité Universitätsmedizin Berlin", - "role_title" => "Resident in Internal Medicine", - "start_date" => "1998-09-01T00:00:00Z" }]) + expect(person.employment).to eq( + [ + { + "organization_id" => "https://grid.ac/institutes/grid.475826.a", + "organization_name" => "DataCite", + "role_title" => "Technical Director", + "start_date" => "2015-08-01T00:00:00Z", + }, + { + "end_date" => "2017-05-01T00:00:00Z", + "organization_id" => 
"https://grid.ac/institutes/grid.10423.34", + "organization_name" => "Hannover Medical School", + "role_title" => "Clinical Fellow in Hematology and Oncology", + "start_date" => "2005-11-01T00:00:00Z", + }, + { + "end_date" => "2015-07-01T00:00:00Z", + "organization_name" => "Public Library of Science", + "role_title" => + "Technical lead article-level metrics project (contractor)", + "start_date" => "2012-04-01T00:00:00Z", + }, + { + "end_date" => "2005-10-01T00:00:00Z", + "organization_name" => "Charité Universitätsmedizin Berlin", + "role_title" => "Resident in Internal Medicine", + "start_date" => "1998-09-01T00:00:00Z", + }, + ], + ) end it "found with X in ID" do @@ -98,14 +147,24 @@ expect(person.identifiers).to be_empty expect(person.country).to be_nil expect(person.employment.length).to eq(1) - expect(person.employment).to eq([{ "organization_name" => "University of Cambridge", - "role_title" => "Senior Lecturer in Latin American Literature and Culture", - "start_date" => "2006-01-01T00:00:00Z" }]) + expect(person.employment).to eq( + [ + { + "organization_name" => "University of Cambridge", + "role_title" => + "Senior Lecturer in Latin American Literature and Culture", + "start_date" => "2006-01-01T00:00:00Z", + }, + ], + ) end it "account locked" do id = "https://orcid.org/0000-0003-1315-5960" - expect { Person.find_by(id: id) }.to raise_error(Faraday::ClientError, /ORCID record is locked/) + expect { Person.find_by(id: id) }.to raise_error( + Faraday::ClientError, + /ORCID record is locked/, + ) end it "not found" do @@ -120,7 +179,7 @@ it "found all" do query = nil people = Person.query(query) - expect(people.dig(:meta, "total")).to eq(9229580) + expect(people.dig(:meta, "total")).to eq(9_229_580) expect(people.dig(:data).size).to eq(25) person = people[:data].first expect(person.id).to eq("https://orcid.org/0000-0002-5387-6407") @@ -137,7 +196,7 @@ it "found miller" do query = "miller" people = Person.query(query) - expect(people.dig(:meta, 
"total")).to eq(7660) + expect(people.dig(:meta, "total")).to eq(7_660) expect(people.dig(:data).size).to eq(25) person = people[:data].first expect(person.id).to eq("https://orcid.org/0000-0002-2131-0054") @@ -154,7 +213,7 @@ it "found datacite" do query = "datacite" people = Person.query(query) - expect(people.dig(:meta, "total")).to eq(15825) + expect(people.dig(:meta, "total")).to eq(15_825) expect(people.dig(:data).size).to eq(25) person = people[:data].first expect(person.id).to eq("https://orcid.org/0000-0002-9300-5278") @@ -170,7 +229,10 @@ it "handle errors gracefully" do query = "container.identifier:2658-719X" - expect { Person.query(query) }.to raise_error(Faraday::ClientError, /Error from server/) + expect { Person.query(query) }.to raise_error( + Faraday::ClientError, + /Error from server/, + ) end end end diff --git a/spec/models/prefix_spec.rb b/spec/models/prefix_spec.rb index 79f66e3ad..665e47848 100644 --- a/spec/models/prefix_spec.rb +++ b/spec/models/prefix_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" RSpec.describe Prefix, type: :model do @@ -21,7 +23,8 @@ end it "prefixes with where year" do - collection = Prefix.where("YEAR(prefixes.created_at) = ?", prefix.created_at) + collection = + Prefix.where("YEAR(prefixes.created_at) = ?", prefix.created_at) single = collection.first expect(single.created_at.year).to eq(prefix.created_at.year) expect(single.uid).to eq(prefix.uid) diff --git a/spec/models/provider_prefix_spec.rb b/spec/models/provider_prefix_spec.rb index 6c6159beb..254ac399b 100644 --- a/spec/models/provider_prefix_spec.rb +++ b/spec/models/provider_prefix_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ProviderPrefix, type: :model do diff --git a/spec/models/provider_spec.rb b/spec/models/provider_spec.rb index cbad4d53f..666ec946b 100644 --- a/spec/models/provider_spec.rb +++ b/spec/models/provider_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + 
require "rails_helper" describe Provider, type: :model do @@ -17,7 +19,9 @@ end describe "admin" do - subject { create(:provider, role_name: "ROLE_ADMIN", name: "Admin", symbol: "ADMIN") } + subject do + create(:provider, role_name: "ROLE_ADMIN", name: "Admin", symbol: "ADMIN") + end it "works" do expect(subject.role_name).to eq("ROLE_ADMIN") @@ -25,7 +29,14 @@ end describe "provider with ROLE_CONTRACTUAL_PROVIDER" do - subject { create(:provider, role_name: "ROLE_CONTRACTUAL_PROVIDER", name: "Contractor", symbol: "CONTRCTR") } + subject do + create( + :provider, + role_name: "ROLE_CONTRACTUAL_PROVIDER", + name: "Contractor", + symbol: "CONTRCTR", + ) + end it "works" do expect(subject.role_name).to eq("ROLE_CONTRACTUAL_PROVIDER") @@ -35,7 +46,14 @@ end describe "provider with ROLE_REGISTRATION_AGENCY" do - subject { create(:provider, role_name: "ROLE_REGISTRATION_AGENCY", name: "Crossref", symbol: "CROSSREF") } + subject do + create( + :provider, + role_name: "ROLE_REGISTRATION_AGENCY", + name: "Crossref", + symbol: "CROSSREF", + ) + end it "works" do expect(subject.role_name).to eq("ROLE_REGISTRATION_AGENCY") @@ -68,7 +86,9 @@ it "not_supported" do subject.non_profit_status = "super-profit" expect(subject.save).to be false - expect(subject.errors.details).to eq(non_profit_status: [{ error: :inclusion, value: "super-profit" }]) + expect(subject.errors.details).to eq( + non_profit_status: [{ error: :inclusion, value: "super-profit" }], + ) end end @@ -76,13 +96,21 @@ subject { build(:provider) } it "with logo" do - subject.logo = "data:image/png;base64," + Base64.strict_encode64(file_fixture("bl.png").read) + subject.logo = + "data:image/png;base64," + + Base64.strict_encode64(file_fixture("bl.png").read) expect(subject.save).to be true expect(subject.errors.details).to be_empty expect(subject.logo.file?).to be true - expect(subject.logo.url).to start_with("/images/members/" + subject.symbol.downcase + ".png") - expect(subject.logo.url(:medium)).to 
start_with("/images/members/" + subject.symbol.downcase + ".png") - expect(subject.logo_url).to start_with("/images/members/" + subject.symbol.downcase + ".png") + expect(subject.logo.url).to start_with( + "/images/members/" + subject.symbol.downcase + ".png", + ) + expect(subject.logo.url(:medium)).to start_with( + "/images/members/" + subject.symbol.downcase + ".png", + ) + expect(subject.logo_url).to start_with( + "/images/members/" + subject.symbol.downcase + ".png", + ) expect(subject.logo_file_name).to eq(subject.symbol.downcase + ".png") expect(subject.logo.content_type).to eq("image/png") expect(subject.logo.size).to be > 10 @@ -109,7 +137,9 @@ it "invalid" do subject.salesforce_id = "abc" expect(subject.save).to be false - expect(subject.errors.details).to eq(salesforce_id: [{ error: :invalid, value: "abc" }]) + expect(subject.errors.details).to eq( + salesforce_id: [{ error: :invalid, value: "abc" }], + ) end it "blank" do @@ -120,9 +150,23 @@ end describe "provider with ROLE_CONSORTIUM" do - subject { create(:provider, role_name: "ROLE_CONSORTIUM", name: "Virtual Library of Virginia", symbol: "VIVA") } + subject do + create( + :provider, + role_name: "ROLE_CONSORTIUM", + name: "Virtual Library of Virginia", + symbol: "VIVA", + ) + end - let!(:consortium_organizations) { create_list(:provider, 3, role_name: "ROLE_CONSORTIUM_ORGANIZATION", consortium_id: subject.symbol) } + let!(:consortium_organizations) do + create_list( + :provider, + 3, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + consortium_id: subject.symbol, + ) + end it "works" do expect(subject.role_name).to eq("ROLE_CONSORTIUM") @@ -131,14 +175,30 @@ expect(subject.consortium_organizations.length).to eq(3) consortium_organization = subject.consortium_organizations.last expect(consortium_organization.consortium_id).to eq("VIVA") - expect(consortium_organization.member_type).to eq("consortium_organization") + expect(consortium_organization.member_type).to eq( + "consortium_organization", + ) end 
end describe "provider with ROLE_CONSORTIUM_ORGANIZATION" do - let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM", name: "Virtual Library of Virginia", symbol: "VIVA") } + let(:consortium) do + create( + :provider, + role_name: "ROLE_CONSORTIUM", + name: "Virtual Library of Virginia", + symbol: "VIVA", + ) + end - subject { create(:provider, name: "University of Virginia", role_name: "ROLE_CONSORTIUM_ORGANIZATION", consortium_id: consortium.symbol) } + subject do + create( + :provider, + name: "University of Virginia", + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + consortium_id: consortium.symbol, + ) + end it "works" do expect(subject.role_name).to eq("ROLE_CONSORTIUM_ORGANIZATION") @@ -155,7 +215,9 @@ params = provider.to_jsonapi expect(params.dig("id")).to eq(provider.symbol.downcase) expect(params.dig("attributes", "symbol")).to eq(provider.symbol) - expect(params.dig("attributes", "system-email")).to eq(provider.system_email) + expect(params.dig("attributes", "system-email")).to eq( + provider.system_email, + ) expect(params.dig("attributes", "is-active")).to be true end end @@ -165,7 +227,9 @@ subject { create(:provider, password_input: password_input) } it "should use password_input" do - expect(subject.password).to eq(subject.encrypt_password_sha256(password_input)) + expect(subject.password).to eq( + subject.encrypt_password_sha256(password_input), + ) end it "should not use password_input when it is blank" do @@ -193,24 +257,28 @@ it "should reject string that is not a UUID" do provider.globus_uuid = "abc" expect(provider.save).to be false - expect(provider.errors.details).to eq(globus_uuid: [{ error: "abc is not a valid UUID" }]) + expect(provider.errors.details).to eq( + globus_uuid: [{ error: "abc is not a valid UUID" }], + ) end end describe "cumulative_years" do before(:each) do - allow(Time).to receive(:now).and_return(Time.mktime(2015, 4, 8)) - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time).to 
receive(:now).and_return(Time.mktime(2_015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) end it "should show all cumulative years" do provider = create(:provider) - expect(provider.cumulative_years).to eq([2015, 2016, 2017, 2018, 2019, 2020]) + expect(provider.cumulative_years).to eq( + [2_015, 2_016, 2_017, 2_018, 2_019, 2_020], + ) end it "should show years before deleted" do provider = create(:provider, deleted_at: "2018-06-14") - expect(provider.cumulative_years).to eq([2015, 2016, 2017]) + expect(provider.cumulative_years).to eq([2_015, 2_016, 2_017]) end it "empty if deleted in creation year" do diff --git a/spec/models/usage_report_spec.rb b/spec/models/usage_report_spec.rb index 7177ede57..d8051049e 100644 --- a/spec/models/usage_report_spec.rb +++ b/spec/models/usage_report_spec.rb @@ -5,10 +5,15 @@ describe UsageReport, type: :model, vcr: true do describe "find_by_id" do it "found" do - id = "https://api.test.datacite.org/reports/0498876e-dd55-42b0-b2a6-850df004a0e4" + id = + "https://api.test.datacite.org/reports/0498876e-dd55-42b0-b2a6-850df004a0e4" usage_reports = UsageReport.find_by(id: id) expect(usage_reports[:data].size).to eq(1) - expect(usage_reports[:data].first).to eq(id: "https://api.test.datacite.org/reports/0498876e-dd55-42b0-b2a6-850df004a0e4", reporting_period: { begin_date: "2018-10-01", end_date: "2018-10-31" }) + expect(usage_reports[:data].first).to eq( + id: + "https://api.test.datacite.org/reports/0498876e-dd55-42b0-b2a6-850df004a0e4", + reporting_period: { begin_date: "2018-10-01", end_date: "2018-10-31" }, + ) end it "not found" do @@ -24,8 +29,11 @@ usage_reports = UsageReport.query(query, page: {  number: 1, size: 25 }) expect(usage_reports.dig(:meta, "total")).to eq(309) expect(usage_reports[:data].size).to eq(25) - expect(usage_reports[:data].first).to eq(id: "https://api.test.datacite.org/reports/0148536f-bfbc-4775-b93c-524e91d0b9f6", - reporting_period: { begin_date: "2017-09-01", end_date: 
"2017-09-30" }) + expect(usage_reports[:data].first).to eq( + id: + "https://api.test.datacite.org/reports/0148536f-bfbc-4775-b93c-524e91d0b9f6", + reporting_period: { begin_date: "2017-09-01", end_date: "2017-09-30" }, + ) end it "size" do @@ -33,8 +41,11 @@ usage_reports = UsageReport.query(query, page: {  number: 1, size: 10 }) expect(usage_reports.dig(:meta, "total")).to eq(309) expect(usage_reports[:data].size).to eq(10) - expect(usage_reports[:data].first).to eq(id: "https://api.test.datacite.org/reports/0148536f-bfbc-4775-b93c-524e91d0b9f6", - reporting_period: { begin_date: "2017-09-01", end_date: "2017-09-30" }) + expect(usage_reports[:data].first).to eq( + id: + "https://api.test.datacite.org/reports/0148536f-bfbc-4775-b93c-524e91d0b9f6", + reporting_period: { begin_date: "2017-09-01", end_date: "2017-09-30" }, + ) end end end diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb index 05b115634..21eec8ab7 100644 --- a/spec/models/user_spec.rb +++ b/spec/models/user_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe User, type: :model do @@ -17,8 +19,15 @@ end describe "from basic_auth admin" do - let(:provider) { create(:provider, role_name: "ROLE_ADMIN", symbol: "ADMIN", password_input: "12345") } - let(:credentials) { provider.encode_auth_param(username: provider.symbol, password: 12345) } + let(:provider) do + create( + :provider, + role_name: "ROLE_ADMIN", symbol: "ADMIN", password_input: "12345", + ) + end + let(:credentials) do + provider.encode_auth_param(username: provider.symbol, password: 12_345) + end let(:user) { User.new(credentials, type: "basic") } describe "User attributes" do @@ -38,7 +47,9 @@ describe "from basic_auth provider" do let(:provider) { create(:provider, password_input: "12345") } - let(:credentials) { provider.encode_auth_param(username: provider.symbol, password: 12345) } + let(:credentials) do + provider.encode_auth_param(username: provider.symbol, password: 12_345) + end 
let(:user) { User.new(credentials, type: "basic") } describe "User attributes" do @@ -58,8 +69,12 @@ end describe "from basic_auth consortium" do - let(:provider) { create(:provider, password_input: "12345", role_name: "ROLE_CONSORTIUM") } - let(:credentials) { provider.encode_auth_param(username: provider.symbol, password: 12345) } + let(:provider) do + create(:provider, password_input: "12345", role_name: "ROLE_CONSORTIUM") + end + let(:credentials) do + provider.encode_auth_param(username: provider.symbol, password: 12_345) + end let(:user) { User.new(credentials, type: "basic") } describe "User attributes" do @@ -80,7 +95,9 @@ describe "from basic_auth client" do let(:client) { create(:client, password_input: "12345") } - let(:credentials) { client.encode_auth_param(username: client.symbol, password: 12345) } + let(:credentials) do + client.encode_auth_param(username: client.symbol, password: 12_345) + end let(:user) { User.new(credentials, type: "basic") } describe "User attributes" do @@ -104,8 +121,17 @@ end describe "reset client password", vcr: true do - let(:provider) { create(:provider, symbol: "DATACITE", password_input: "12345") } - let(:client) { create(:client, provider: provider, symbol: "DATACITE.DATACITE", system_email: "test@datacite.org") } + let(:provider) do + create(:provider, symbol: "DATACITE", password_input: "12345") + end + let(:client) do + create( + :client, + provider: provider, + symbol: "DATACITE.DATACITE", + system_email: "test@datacite.org", + ) + end it "sends message" do response = User.reset(client.symbol) diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index 6ea471be1..b5f37fef1 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + ENV["RAILS_ENV"] = "test" ENV["TEST_CLUSTER_NODES"] = "1" @@ -55,9 +57,7 @@ ActiveJob::Base.queue_adapter = :test if Bullet.enable? 
- config.before(:each) do - Bullet.start_request - end + config.before(:each) { Bullet.start_request } config.after(:each) do Bullet.perform_out_of_channel_notifications if Bullet.notification? @@ -69,9 +69,14 @@ VCR.configure do |c| vcr_mode = /rec/i.match?(ENV["VCR_MODE"]) ? :all : :once - mds_token = Base64.strict_encode64("#{ENV['MDS_USERNAME']}:#{ENV['MDS_PASSWORD']}") - admin_token = Base64.strict_encode64("#{ENV['ADMIN_USERNAME']}:#{ENV['ADMIN_PASSWORD']}") - handle_token = Base64.strict_encode64("300%3A#{ENV['HANDLE_USERNAME']}:#{ENV['HANDLE_PASSWORD']}") + mds_token = + Base64.strict_encode64("#{ENV['MDS_USERNAME']}:#{ENV['MDS_PASSWORD']}") + admin_token = + Base64.strict_encode64("#{ENV['ADMIN_USERNAME']}:#{ENV['ADMIN_PASSWORD']}") + handle_token = + Base64.strict_encode64( + "300%3A#{ENV['HANDLE_USERNAME']}:#{ENV['HANDLE_PASSWORD']}", + ) mailgun_token = Base64.strict_encode64("api:#{ENV['MAILGUN_API_KEY']}") sqs_host = "sqs.#{ENV['AWS_REGION']}.amazonaws.com" diff --git a/spec/requests/activities_spec.rb b/spec/requests/activities_spec.rb index dd1d12492..59c40cb65 100644 --- a/spec/requests/activities_spec.rb +++ b/spec/requests/activities_spec.rb @@ -1,11 +1,33 @@ +# frozen_string_literal: true + require "rails_helper" describe ActivitiesController, type: :request do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) } + let(:client) do + create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) + end let(:doi) { create(:doi, client: client) } - let(:bearer) { Client.generate_token(role_id: "client_admin", uid: client.symbol, provider_id: provider.symbol.downcase, client_id: client.symbol.downcase, password: client.password) } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:bearer) do + Client.generate_token( + 
role_id: "client_admin", + uid: client.symbol, + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + password: client.password, + ) + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end describe "activities for doi", elasticsearch: true do let!(:doi) { create(:doi, client: client) } @@ -19,18 +41,29 @@ context "without username" do it "returns the activities" do - get "/dois/#{doi.doi.downcase}/activities", params: nil, session: headers + get "/dois/#{doi.doi.downcase}/activities", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data").length).to eq(1) expect(json.dig("meta", "total")).to eq(1) expect(json.dig("data", 0, "attributes", "action")).to eq("create") - expect(json.dig("data", 0, "attributes", "changes", "aasm_state")).to eq("draft") - - expect(json.dig("data", 0, "attributes", "prov:wasAttributedTo")).to be_nil - expect(json.dig("data", 0, "attributes", "prov:wasGeneratedBy")).to be_present - expect(json.dig("data", 0, "attributes", "prov:generatedAtTime")).to be_present - expect(json.dig("data", 0, "attributes", "prov:wasDerivedFrom")).to be_present + expect( + json.dig("data", 0, "attributes", "changes", "aasm_state"), + ).to eq("draft") + + expect( + json.dig("data", 0, "attributes", "prov:wasAttributedTo"), + ).to be_nil + expect( + json.dig("data", 0, "attributes", "prov:wasGeneratedBy"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:generatedAtTime"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasDerivedFrom"), + ).to be_present end end end @@ -47,17 +80,26 @@ context "repository" do it "returns the activities" do - get "/repositories/#{client.symbol.downcase}/activities", params: nil, session: headers + get "/repositories/#{client.symbol.downcase}/activities", + params: nil, session: headers expect(last_response.status).to eq(200) 
expect(json.dig("data").length).to eq(1) expect(json.dig("meta", "total")).to eq(1) expect(json.dig("data", 0, "attributes", "action")).to eq("create") - expect(json.dig("data", 0, "attributes", "prov:wasAttributedTo")).to be_nil - expect(json.dig("data", 0, "attributes", "prov:wasGeneratedBy")).to be_present - expect(json.dig("data", 0, "attributes", "prov:generatedAtTime")).to be_present - expect(json.dig("data", 0, "attributes", "prov:wasDerivedFrom")).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasAttributedTo"), + ).to be_nil + expect( + json.dig("data", 0, "attributes", "prov:wasGeneratedBy"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:generatedAtTime"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasDerivedFrom"), + ).to be_present end end end @@ -74,17 +116,26 @@ context "provider" do it "returns the activities" do - get "/providers/#{provider.symbol.downcase}/activities", params: nil, session: headers + get "/providers/#{provider.symbol.downcase}/activities", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data").length).to eq(1) expect(json.dig("meta", "total")).to eq(1) expect(json.dig("data", 0, "attributes", "action")).to eq("create") - expect(json.dig("data", 0, "attributes", "prov:wasAttributedTo")).to be_nil - expect(json.dig("data", 0, "attributes", "prov:wasGeneratedBy")).to be_present - expect(json.dig("data", 0, "attributes", "prov:generatedAtTime")).to be_present - expect(json.dig("data", 0, "attributes", "prov:wasDerivedFrom")).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasAttributedTo"), + ).to be_nil + expect( + json.dig("data", 0, "attributes", "prov:wasGeneratedBy"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:generatedAtTime"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasDerivedFrom"), + ).to be_present end end end @@ -101,18 +152,29 @@ context 
"query" do it "returns the activities" do - get "/activities?query=#{doi.doi.downcase}", params: nil, session: headers + get "/activities?query=#{doi.doi.downcase}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data").length).to eq(1) expect(json.dig("meta", "total")).to eq(1) expect(json.dig("data", 0, "attributes", "action")).to eq("create") - expect(json.dig("data", 0, "attributes", "changes", "aasm_state")).to eq("draft") - - expect(json.dig("data", 0, "attributes", "prov:wasAttributedTo")).to be_nil - expect(json.dig("data", 0, "attributes", "prov:wasGeneratedBy")).to be_present - expect(json.dig("data", 0, "attributes", "prov:generatedAtTime")).to be_present - expect(json.dig("data", 0, "attributes", "prov:wasDerivedFrom")).to be_present + expect( + json.dig("data", 0, "attributes", "changes", "aasm_state"), + ).to eq("draft") + + expect( + json.dig("data", 0, "attributes", "prov:wasAttributedTo"), + ).to be_nil + expect( + json.dig("data", 0, "attributes", "prov:wasGeneratedBy"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:generatedAtTime"), + ).to be_present + expect( + json.dig("data", 0, "attributes", "prov:wasDerivedFrom"), + ).to be_present end end end diff --git a/spec/requests/client_prefixes_spec.rb b/spec/requests/client_prefixes_spec.rb index 2a54f77e2..b8f8de125 100644 --- a/spec/requests/client_prefixes_spec.rb +++ b/spec/requests/client_prefixes_spec.rb @@ -1,14 +1,28 @@ +# frozen_string_literal: true + require "rails_helper" describe "Client Prefixes", type: :request, elasticsearch: true do let(:prefix) { create(:prefix) } let(:provider) { create(:provider) } let(:client) { create(:client, provider: provider) } - let(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } + let(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end let!(:client_prefixes) { create_list(:client_prefix, 5) } - let(:client_prefix) { 
create(:client_prefix, client: client, prefix: prefix, provider_prefix: provider_prefix) } + let(:client_prefix) do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix: provider_prefix, + ) + end let(:bearer) { User.generate_token(role_id: "staff_admin") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end describe "GET /client-prefixes" do before do @@ -34,7 +48,8 @@ context "when the record exists" do it "returns the client-prefix" do - get "/client-prefixes/#{client_prefix.uid}", params: nil, session: headers + get "/client-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data", "id")).to eq(client_prefix.uid) @@ -46,17 +61,23 @@ get "/client-prefixes/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "PATCH /client-prefixes/:uid" do it "returns method not supported error" do - patch "/client-prefixes/#{client_prefix.uid}", params: nil, session: headers + patch "/client-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(405) - expect(json.dig("errors")).to eq([{ "status" => "405", "title" => "Method not allowed" }]) + expect(json.dig("errors")).to eq( + [{ "status" => "405", "title" => "Method not allowed" }], + ) end end @@ -68,23 +89,12 @@ "type" => "client-prefixes", "relationships": { "client": { - "data": { - "type": "client", - "id": client.symbol.downcase, - }, + "data": { "type": "client", "id": client.symbol.downcase }, }, "providerPrefix": 
{ - "data": { - "type": "provider-prefix", - "id": provider_prefix.uid, - }, - }, - "prefix": { - "data": { - "type": "prefix", - "id": prefix.uid, - }, + "data": { "type": "provider-prefix", "id": provider_prefix.uid }, }, + "prefix": { "data": { "type": "prefix", "id": prefix.uid } }, }, }, } @@ -101,18 +111,16 @@ context "when the request is invalid" do let!(:client) { create(:client) } let(:not_valid_attributes) do - { - "data" => { - "type" => "client-prefixes", - }, - } + { "data" => { "type" => "client-prefixes" } } end it "returns status code 422" do post "/client-prefixes", params: not_valid_attributes, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "client", "title" => "Must exist") + expect(json["errors"].first).to eq( + "source" => "client", "title" => "Must exist", + ) end end end @@ -126,7 +134,8 @@ end it "deletes the prefix" do - delete "/client-prefixes/#{client_prefix.uid}", params: nil, session: headers + delete "/client-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(204) end end diff --git a/spec/requests/clients_spec.rb b/spec/requests/clients_spec.rb index 0fc17202f..f1e3f5ab3 100644 --- a/spec/requests/clients_spec.rb +++ b/spec/requests/clients_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ClientsController, type: :request, elasticsearch: true do @@ -6,23 +8,29 @@ let(:provider) { create(:provider, password_input: "12345") } let!(:client) { create(:client, provider: provider) } let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "symbol" => provider.symbol + ".IMPERIAL", - "name" => "Imperial College", - "contactEmail" => "bob@example.com", - "clientType" => "repository", - }, - "relationships": { - "provider": { - "data": { - "type": "providers", - "id": provider.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "clients", + "attributes" => { + 
"symbol" => provider.symbol + ".IMPERIAL", + "name" => "Imperial College", + "contactEmail" => "bob@example.com", + "clientType" => "repository", + }, + "relationships": { + "provider": { + "data": { "type": "providers", "id": provider.symbol.downcase }, + }, + }, + }, + } + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } end - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } let(:query) { "jamon" } describe "GET /clients", elasticsearch: true do @@ -79,7 +87,9 @@ expect(last_response.status).to eq(200) expect(json.dig("data", "attributes", "name")).to eq(client.name) - expect(json.dig("data", "attributes", "globusUuid")).to eq("bc7d0274-3472-4a79-b631-e4c7baccc667") + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "bc7d0274-3472-4a79-b631-e4c7baccc667", + ) end end @@ -88,14 +98,23 @@ get "/clients/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "GET /clients/totals" do let(:client) { create(:client) } - let!(:datacite_dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end before do Client.import @@ -108,7 +127,9 @@ expect(last_response.status).to eq(200) expect(json.first.dig("count")).to eq(3) - expect(json.first.dig("states")).to eq([{ "count" => 3, "id" => "findable", "title" => "Findable" }]) + expect(json.first.dig("states")).to eq( + [{ "count" => 3, "id" => "findable", "title" => "Findable" }], + ) expect(json.first.dig("temporal")).not_to be_nil end end @@ 
-125,7 +146,9 @@ expect(attributes["clientType"]).to eq("repository") relationships = json.dig("data", "relationships") - expect(relationships.dig("provider", "data", "id")).to eq(provider.symbol.downcase) + expect(relationships.dig("provider", "data", "id")).to eq( + provider.symbol.downcase, + ) Client.import sleep 2 @@ -133,25 +156,28 @@ get "/clients", params: nil, session: headers expect(json["data"].size).to eq(2) - expect(json.dig("meta", "clientTypes")).to eq([{ "count" => 2, "id" => "repository", "title" => "Repository" }]) + expect(json.dig("meta", "clientTypes")).to eq( + [{ "count" => 2, "id" => "repository", "title" => "Repository" }], + ) end end context "when the request is invalid" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "symbol" => provider.symbol + ".IMPERIAL", - "name" => "Imperial College", - }, - "relationships": { - "provider": { - "data": { - "type": "providers", - "id": provider.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "clients", + "attributes" => { + "symbol" => provider.symbol + ".IMPERIAL", + "name" => "Imperial College", + }, + "relationships": { + "provider": { + "data": { "type": "providers", "id": provider.symbol.downcase }, + }, + }, + }, + } end it "returns status code 422" do @@ -163,7 +189,12 @@ it "returns a validation failure message" do post "/clients", params: params, session: headers - expect(json["errors"]).to eq([{ "source" => "system_email", "title" => "Can't be blank" }, { "source" => "system_email", "title" => "Is invalid" }]) + expect(json["errors"]).to eq( + [ + { "source" => "system_email", "title" => "Can't be blank" }, + { "source" => "system_email", "title" => "Is invalid" }, + ], + ) end end end @@ -171,29 +202,39 @@ describe "PUT /clients/:id" do context "when the record exists" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "name" => "Imperial College 2", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - } } } 
+ { + "data" => { + "type" => "clients", + "attributes" => { + "name" => "Imperial College 2", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + }, + }, + } end it "updates the record" do put "/clients/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Imperial College 2") - expect(json.dig("data", "attributes", "globusUuid")).to eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") + expect(json.dig("data", "attributes", "name")).to eq( + "Imperial College 2", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) end end context "change client_type" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "clientType" => "periodical", - } } } + { + "data" => { + "type" => "clients", + "attributes" => { "clientType" => "periodical" }, + }, + } end it "updates the record" do @@ -206,10 +247,11 @@ context "removes the globus_uuid" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "globusUuid" => nil, - } } } + { + "data" => { + "type" => "clients", "attributes" => { "globusUuid" => nil } + }, + } end it "updates the record" do @@ -222,20 +264,32 @@ end context "transfer repository" do - let(:new_provider) { create(:provider, symbol: "QUECHUA", password_input: "12345") } + let(:new_provider) do + create(:provider, symbol: "QUECHUA", password_input: "12345") + end let!(:prefixes) { create_list(:prefix, 3) } let!(:prefix) { prefixes.first } - let!(:provider_prefix_more) { create(:provider_prefix, provider: provider, prefix: prefixes.last) } - let!(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid) } + let!(:provider_prefix_more) do + 
create(:provider_prefix, provider: provider, prefix: prefixes.last) + end + let!(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end + let!(:client_prefix) do + create( + :client_prefix, + client: client, + prefix: prefix, + provider_prefix_id: provider_prefix.uid, + ) + end let(:doi) { create_list(:doi, 10, client: client) } let(:params) do { "data" => { "type" => "clients", "attributes" => { - "mode" => "transfer", - "targetId" => new_provider.symbol, + "mode" => "transfer", "targetId" => new_provider.symbol }, }, } @@ -246,72 +300,106 @@ expect(last_response.status).to eq(200) expect(json.dig("data", "attributes", "name")).to eq("My data center") - expect(json.dig("data", "relationships", "provider", "data", "id")).to eq("quechua") - expect(json.dig("data", "relationships", "prefixes", "data").first.dig("id")).to eq(prefix.uid) + expect( + json.dig("data", "relationships", "provider", "data", "id"), + ).to eq("quechua") + expect( + json.dig("data", "relationships", "prefixes", "data").first.dig("id"), + ).to eq(prefix.uid) get "/providers/#{provider.symbol}" - expect(json.dig("data", "relationships", "prefixes", "data").length).to eq(1) - expect(json.dig("data", "relationships", "prefixes", "data").first.dig("id")).to eq(prefixes.last.uid) + expect( + json.dig("data", "relationships", "prefixes", "data").length, + ).to eq(1) + expect( + json.dig("data", "relationships", "prefixes", "data").first.dig("id"), + ).to eq(prefixes.last.uid) get "/providers/#{new_provider.symbol}" - expect(json.dig("data", "relationships", "prefixes", "data").first.dig("id")).to eq(prefix.uid) + expect( + json.dig("data", "relationships", "prefixes", "data").first.dig("id"), + ).to eq(prefix.uid) get "/prefixes/#{prefix.uid}" - expect(json.dig("data", "relationships", "clients", "data").first.dig("id")).to eq(client.symbol.downcase) + expect( + json.dig("data", "relationships", "clients", "data").first.dig("id"), + ).to eq(client.symbol.downcase) 
end end context "invalid globus_uuid" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "globusUuid" => "abc", - } } } + { + "data" => { + "type" => "clients", "attributes" => { "globusUuid" => "abc" } + }, + } end it "updates the record" do put "/clients/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "globus_uuid", "title" => "Abc is not a valid UUID") + expect(json["errors"].first).to eq( + "source" => "globus_uuid", "title" => "Abc is not a valid UUID", + ) end end context "using basic auth", vcr: true do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "name" => "Imperial College 2", - } } } + { + "data" => { + "type" => "clients", + "attributes" => { "name" => "Imperial College 2" }, + }, + } + end + let(:credentials) do + provider.encode_auth_param( + username: provider.symbol.downcase, password: "12345", + ) + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Basic " + credentials, + } end - let(:credentials) { provider.encode_auth_param(username: provider.symbol.downcase, password: "12345") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + credentials } } it "updates the record" do put "/clients/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Imperial College 2") + expect(json.dig("data", "attributes", "name")).to eq( + "Imperial College 2", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) end end context "when the request is invalid" do let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "symbol" => client.symbol + "M", - "email" => "bob@example.com", - "name" => "Imperial College", - } } } + { + "data" => { + "type" => "clients", + "attributes" => { + "symbol" => 
client.symbol + "M", + "email" => "bob@example.com", + "name" => "Imperial College", + }, + }, + } end it "returns a validation failure message" do put "/clients/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "symbol", "title" => "Cannot be changed") + expect(json["errors"].first).to eq( + "source" => "symbol", "title" => "Cannot be changed", + ) end end end @@ -333,19 +421,30 @@ it "returns a validation failure message" do delete "/clients/xxx", params: nil, session: headers - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "doi transfer", elasticsearch: true do let!(:dois) { create_list(:doi, 3, client: client) } - let(:target) { create(:client, provider: provider, symbol: provider.symbol + ".TARGET", name: "Target Client") } + let(:target) do + create( + :client, + provider: provider, + symbol: provider.symbol + ".TARGET", + name: "Target Client", + ) + end let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "targetId" => target.symbol, - } } } + { + "data" => { + "type" => "clients", "attributes" => { "targetId" => target.symbol } + }, + } end before do diff --git a/spec/requests/datacite_dois_spec.rb b/spec/requests/datacite_dois_spec.rb index a05ba87ab..27b783d71 100644 --- a/spec/requests/datacite_dois_spec.rb +++ b/spec/requests/datacite_dois_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataciteDoisController, type: :request, vcr: true do diff --git a/spec/requests/events_spec.rb b/spec/requests/events_spec.rb index 0b636a1d1..70c8a6e5f 100644 --- a/spec/requests/events_spec.rb +++ b/spec/requests/events_spec.rb @@ -4,11 +4,18 @@ describe EventsController, type: :request, elasticsearch: 
true, vcr: true do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) } + let(:client) do + create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) + end before(:each) do - allow(Time).to receive(:now).and_return(Time.mktime(2015, 4, 8)) - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) end let(:event) { build(:event) } @@ -16,7 +23,8 @@ # Successful response from creating via the API. let(:success) do - { "id" => event.uuid, + { + "id" => event.uuid, "type" => "events", "attributes" => { "subjId" => "http://www.citeulike.org/user/dbogartoit", @@ -28,42 +36,60 @@ "total" => 1, "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "occurredAt" => "2015-04-08T00:00:00.000Z", - "subj" => { "@id" => "http://www.citeulike.org/user/dbogartoit", - "@type" => "CreativeWork", - "author" => [{ "givenName" => "dbogartoit" }], - "name" => "CiteULike bookmarks for user dbogartoit", - "publisher" => { "@type" => "Organization", "name" => "CiteULike" }, - "periodical" => { "@type" => "Periodical", "@id" => "https://doi.org/10.13039/100011326", "name" => "CiteULike", "issn" => "9812-847X" }, - "funder" => { "@type" => "Organization", "@id" => "https://doi.org/10.13039/100011326", "name" => "CiteULike" }, - "version" => "1.0", - "proxyIdentifiers" => ["10.13039/100011326"], - "datePublished" => "2006-06-13T16:14:19Z", - "dateModified" => "2006-06-13T16:14:19Z", - "url" => "http://www.citeulike.org/user/dbogartoit" }, + "subj" => { + "@id" => "http://www.citeulike.org/user/dbogartoit", + "@type" => "CreativeWork", + "author" => [{ "givenName" => "dbogartoit" }], + "name" => "CiteULike bookmarks for user dbogartoit", + "publisher" => { 
"@type" => "Organization", "name" => "CiteULike" }, + "periodical" => { + "@type" => "Periodical", + "@id" => "https://doi.org/10.13039/100011326", + "name" => "CiteULike", + "issn" => "9812-847X", + }, + "funder" => { + "@type" => "Organization", + "@id" => "https://doi.org/10.13039/100011326", + "name" => "CiteULike", + }, + "version" => "1.0", + "proxyIdentifiers" => %w[10.13039/100011326], + "datePublished" => "2006-06-13T16:14:19Z", + "dateModified" => "2006-06-13T16:14:19Z", + "url" => "http://www.citeulike.org/user/dbogartoit", + }, "obj" => {}, - } } + }, + } end let(:token) { User.generate_token(role_id: "staff_admin") } let(:uuid) { SecureRandom.uuid } let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json; version=2", - "HTTP_AUTHORIZATION" => "Bearer #{token}" } + { + "HTTP_ACCEPT" => "application/vnd.api+json; version=2", + "HTTP_AUTHORIZATION" => "Bearer #{token}", + } end context "create" do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subjId" => event.subj_id, - "subj" => event.subj, - "objId" => event.obj_id, - "relationTypeId" => event.relation_type_id, - "sourceId" => event.source_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subjId" => event.subj_id, + "subj" => event.subj, + "objId" => event.obj_id, + "relationTypeId" => event.relation_type_id, + "sourceId" => event.source_id, + "sourceToken" => event.source_token, + }, + }, + } end context "as admin user" do @@ -78,17 +104,23 @@ end context "with very long url" do - let(:url) { "http://navigator.eumetsat.int/soapservices/cswstartup?service=csw&version=2.0.2&request=getrecordbyid&outputschema=http%3A%2F%2Fwww.isotc211.org%2F2005%2Fgmd&id=eo%3Aeum%3Adat%3Amult%3Arac-m11-iasia" } + let(:url) do + 
"http://navigator.eumetsat.int/soapservices/cswstartup?service=csw&version=2.0.2&request=getrecordbyid&outputschema=http%3A%2F%2Fwww.isotc211.org%2F2005%2Fgmd&id=eo%3Aeum%3Adat%3Amult%3Arac-m11-iasia" + end let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => event.subj_id, - "subj" => event.subj, - "objId" => url, - "relationTypeId" => event.relation_type_id, - "sourceId" => "datacite-url", - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => event.subj_id, + "subj" => event.subj, + "objId" => url, + "relationTypeId" => event.relation_type_id, + "sourceId" => "datacite-url", + "sourceToken" => event.source_token, + }, + }, + } end it "JSON" do @@ -108,7 +140,14 @@ post uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -120,72 +159,99 @@ post uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." 
}]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "without sourceToken" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "subjId" => event.subj_id, - "sourceId" => event.source_id, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "subjId" => event.subj_id, + "sourceId" => event.source_id, + }, + }, + } end it "JSON" do post uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source token can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source token can't be blank" }], + ) expect(json["data"]).to be_nil end end context "without sourceId" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "subjId" => event.subj_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "subjId" => event.subj_id, + "sourceToken" => event.source_token, + }, + }, + } end it "JSON" do post uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source can't be blank" }], + ) expect(json["data"]).to be_blank end end context "without subjId" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "sourceId" => event.source_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "sourceId" => event.source_id, + "sourceToken" => event.source_token, + }, + }, + } end it "JSON" do post uri, params: params, session: headers 
expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Subj can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Subj can't be blank" }], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json; version=2", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json; version=2", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -199,11 +265,12 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "sourceToken" => "123", - } } } + { + "event" => { + "type" => "events", + "attributes" => { "uuid" => uuid, "sourceToken" => "123" }, + }, + } end it "JSON" do @@ -216,7 +283,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do post uri, params: params, session: headers @@ -245,16 +317,32 @@ context "with registrant information" do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.18713/jimis-170117-1-2", - "subj" => { "@id": "https://doi.org/10.18713/jimis-170117-1-2", "@type": "ScholarlyArticle", "datePublished": "2017", "proxyIdentifiers": [], "registrantId": "datacite.inist.umr7300" }, - "obj" => { "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", "@type": "ScholarlyArticle", "datePublished": "2013-09", "proxyIdentifiers": ["13646826"], "registrantId": 
"datacite.crossref.citations" }, - "objId" => "https://doi.org/10.1016/j.jastp.2013.05.001", - "relationTypeId" => "references", - "sourceId" => "datacite-crossref", - "sourceToken" => "sourceToken", - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.18713/jimis-170117-1-2", + "subj" => { + "@id": "https://doi.org/10.18713/jimis-170117-1-2", + "@type": "ScholarlyArticle", + "datePublished": "2017", + "proxyIdentifiers": [], + "registrantId": "datacite.inist.umr7300", + }, + "obj" => { + "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", + "@type": "ScholarlyArticle", + "datePublished": "2013-09", + "proxyIdentifiers": %w[13646826], + "registrantId": "datacite.crossref.citations", + }, + "objId" => "https://doi.org/10.1016/j.jastp.2013.05.001", + "relationTypeId" => "references", + "sourceId" => "datacite-crossref", + "sourceToken" => "sourceToken", + }, + }, + } end it "has registrant aggregation" do @@ -263,30 +351,50 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).not_to eq(event.uuid) - expect(json.dig("data", "attributes", "objId")).to eq("https://doi.org/10.1016/j.jastp.2013.05.001") + expect(json.dig("data", "attributes", "objId")).to eq( + "https://doi.org/10.1016/j.jastp.2013.05.001", + ) Event.import sleep 2 get uri, params: nil, session: headers expect(json.dig("meta", "registrants", 0, "count")).to eq(1) - expect(json.dig("meta", "registrants", 0, "id")).to eq("datacite.crossref.citations") + expect(json.dig("meta", "registrants", 0, "id")).to eq( + "datacite.crossref.citations", + ) end end context "with nested attributes" do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.18713/jimis-170117-1-2", - "subj" => { "@id": "https://doi.org/10.18713/jimis-170117-1-2", "@type": "ScholarlyArticle", "datePublished": "2017", "proxyIdentifiers": [], "registrantId": 
"datacite.inist.umr7300" }, - "obj" => { "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", "@type": "ScholarlyArticle", "datePublished": "2013-09", "proxyIdentifiers": ["13646826"], "registrantId": "datacite.crossref.citations" }, - "objId" => "https://doi.org/10.1016/j.jastp.2013.05.001", - "relationTypeId" => "references", - "sourceId" => "datacite-crossref", - "sourceToken" => "sourceToken", - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.18713/jimis-170117-1-2", + "subj" => { + "@id": "https://doi.org/10.18713/jimis-170117-1-2", + "@type": "ScholarlyArticle", + "datePublished": "2017", + "proxyIdentifiers": [], + "registrantId": "datacite.inist.umr7300", + }, + "obj" => { + "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", + "@type": "ScholarlyArticle", + "datePublished": "2013-09", + "proxyIdentifiers": %w[13646826], + "registrantId": "datacite.crossref.citations", + }, + "objId" => "https://doi.org/10.1016/j.jastp.2013.05.001", + "relationTypeId" => "references", + "sourceId" => "datacite-crossref", + "sourceToken" => "sourceToken", + }, + }, + } end it "are correctly stored" do @@ -304,13 +412,17 @@ context "create crossref doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.7554/elife.01567", - "sourceId" => "crossref-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.7554/elife.01567", + "sourceId" => "crossref-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "registered" do @@ -319,20 +431,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.7554/elife.01567") + 
expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.7554/elife.01567", + ) end end context "create crossref doi not found", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.3389/fmicb.2019.01425", - "sourceId" => "crossref-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.3389/fmicb.2019.01425", + "sourceId" => "crossref-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "not registered" do @@ -341,20 +459,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.3389/fmicb.2019.01425") + expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.3389/fmicb.2019.01425", + ) end end context "create medra doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.3280/ecag2018-001005", - "sourceId" => "medra-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.3280/ecag2018-001005", + "sourceId" => "medra-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "registered" do @@ -363,20 +487,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.3280/ecag2018-001005") + expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.3280/ecag2018-001005", + ) end end context "create kisti doi", vcr: true do 
let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.5012/bkcs.2013.34.10.2889", - "sourceId" => "kisti-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.5012/bkcs.2013.34.10.2889", + "sourceId" => "kisti-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "registered" do @@ -385,20 +515,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.5012/bkcs.2013.34.10.2889") + expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.5012/bkcs.2013.34.10.2889", + ) end end context "create jalc doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.1241/johokanri.39.979", - "sourceId" => "jalc-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.1241/johokanri.39.979", + "sourceId" => "jalc-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "registered" do @@ -407,20 +543,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.1241/johokanri.39.979") + expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.1241/johokanri.39.979", + ) end end context "create op doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subjId" => "https://doi.org/10.2903/j.efsa.2018.5239", - "sourceId" 
=> "op-import", - "relationTypeId" => nil, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subjId" => "https://doi.org/10.2903/j.efsa.2018.5239", + "sourceId" => "op-import", + "relationTypeId" => nil, + "sourceToken" => event.source_token, + }, + }, + } end it "registered" do @@ -429,23 +571,29 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subjId")).to eq("https://doi.org/10.2903/j.efsa.2018.5239") + expect(json.dig("data", "attributes", "subjId")).to eq( + "https://doi.org/10.2903/j.efsa.2018.5239", + ) end end context "upsert" do let(:uri) { "/events/#{event.uuid}" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subjId" => event.subj_id, - "subj" => event.subj, - "objId" => event.obj_id, - "relationTypeId" => event.relation_type_id, - "sourceId" => event.source_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subjId" => event.subj_id, + "subj" => event.subj, + "objId" => event.obj_id, + "relationTypeId" => event.relation_type_id, + "sourceId" => event.source_id, + "sourceToken" => event.source_token, + }, + }, + } end context "as admin user" do @@ -466,7 +614,14 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -478,72 +633,96 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." 
}]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "without sourceToken" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "subjId" => event.subj_id, - "sourceId" => event.source_id, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "subjId" => event.subj_id, "sourceId" => event.source_id + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source token can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source token can't be blank" }], + ) expect(json["data"]).to be_nil end end context "without sourceId" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "subjId" => event.subj_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "subjId" => event.subj_id, "sourceToken" => event.source_token + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source can't be blank" }], + ) expect(json["data"]).to be_blank end end context "without subjId" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "sourceId" => event.source_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "sourceId" => event.source_id, "sourceToken" => event.source_token + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - 
expect(json["errors"]).to eq([{ "status" => 422, "title" => "Subj can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Subj can't be blank" }], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json; version=2", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json; version=2", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -557,11 +736,13 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "id" => uuid, - "attributes" => { - "sourceToken" => "123", - } } } + { + "event" => { + "type" => "events", + "id" => uuid, + "attributes" => { "sourceToken" => "123" }, + }, + } end it "JSON" do @@ -574,7 +755,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do put uri, params: params, session: headers @@ -606,16 +792,20 @@ let(:uri) { "/events/#{event.uuid}?include=dois" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subjId" => event.subj_id, - "subj" => event.subj, - "objId" => event.obj_id, - "relationTypeId" => event.relation_type_id, - "sourceId" => event.source_id, - "sourceToken" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subjId" => event.subj_id, + "subj" => event.subj, + "objId" => event.obj_id, + "relationTypeId" => event.relation_type_id, + "sourceId" => event.source_id, + "sourceToken" => event.source_token, 
+ }, + }, + } end context "as admin user" do @@ -635,7 +825,14 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -647,15 +844,24 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json; version=2", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json; version=2", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -669,11 +875,13 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "id" => uuid, - "attributes" => { - "sourceToken" => "123", - } } } + { + "event" => { + "type" => "events", + "id" => uuid, + "attributes" => { "sourceToken" => "123" }, + }, + } end it "JSON" do @@ -686,7 +894,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do put uri, params: params, session: headers @@ -703,7 +916,14 @@ context 
"show" do let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } - let!(:event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by") } + let!(:event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + ) + end let(:uri) { "/events/#{event.uuid}?include=doi-for-source,doi-for-target" } @@ -718,11 +938,21 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "relationTypeId")).to eq("is-referenced-by") - expect(json.dig("data", "attributes", "sourceDoi")).to eq(source_doi.doi.downcase) - expect(json.dig("data", "attributes", "targetDoi")).to eq(doi.doi.downcase) - expect(json.dig("data", "attributes", "sourceRelationTypeId")).to eq("references") - expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq("citations") + expect(json.dig("data", "attributes", "relationTypeId")).to eq( + "is-referenced-by", + ) + expect(json.dig("data", "attributes", "sourceDoi")).to eq( + source_doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "targetDoi")).to eq( + doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "sourceRelationTypeId")).to eq( + "references", + ) + expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq( + "citations", + ) end end @@ -733,11 +963,21 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "relationTypeId")).to eq("is-referenced-by") - expect(json.dig("data", "attributes", "sourceDoi")).to eq(source_doi.doi.downcase) - expect(json.dig("data", "attributes", "targetDoi")).to eq(doi.doi.downcase) - expect(json.dig("data", "attributes", "sourceRelationTypeId")).to 
eq("references") - expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq("citations") + expect(json.dig("data", "attributes", "relationTypeId")).to eq( + "is-referenced-by", + ) + expect(json.dig("data", "attributes", "sourceDoi")).to eq( + source_doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "targetDoi")).to eq( + doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "sourceRelationTypeId")).to eq( + "references", + ) + expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq( + "citations", + ) end end @@ -748,11 +988,21 @@ get uri, params: nil, session: headers puts last_response.body expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "relationTypeId")).to eq("is-referenced-by") - expect(json.dig("data", "attributes", "sourceDoi")).to eq(source_doi.doi.downcase) - expect(json.dig("data", "attributes", "targetDoi")).to eq(doi.doi.downcase) - expect(json.dig("data", "attributes", "sourceRelationTypeId")).to eq("references") - expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq("citations") + expect(json.dig("data", "attributes", "relationTypeId")).to eq( + "is-referenced-by", + ) + expect(json.dig("data", "attributes", "sourceDoi")).to eq( + source_doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "targetDoi")).to eq( + doi.doi.downcase, + ) + expect(json.dig("data", "attributes", "sourceRelationTypeId")).to eq( + "references", + ) + expect(json.dig("data", "attributes", "targetRelationTypeId")).to eq( + "citations", + ) end end @@ -763,7 +1013,14 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." 
}]) + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -778,8 +1035,11 @@ # Exclude the token header. let(:headers) do - { "HTTP_ACCEPT" => "application/json", - "HTTP_USER_AGENT" => "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" } + { + "HTTP_ACCEPT" => "application/json", + "HTTP_USER_AGENT" => + "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)", + } end it "json" do @@ -851,7 +1111,14 @@ delete uri, params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." 
}]) + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) expect(json["data"]).to be_nil end end diff --git a/spec/requests/exports_spec.rb b/spec/requests/exports_spec.rb index 93e5bc50f..241b26e84 100644 --- a/spec/requests/exports_spec.rb +++ b/spec/requests/exports_spec.rb @@ -1,11 +1,33 @@ +# frozen_string_literal: true + require "rails_helper" describe ExportsController, type: :request do let(:admin_bearer) { User.generate_token } - let(:admin_headers) { { "HTTP_ACCEPT" => "text/csv", "HTTP_AUTHORIZATION" => "Bearer " + admin_bearer } } + let(:admin_headers) do + { + "HTTP_ACCEPT" => "text/csv", + "HTTP_AUTHORIZATION" => "Bearer " + admin_bearer, + } + end - let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM", name: "Virtual Library of Virginia", symbol: "VIVA") } - let!(:provider) { create(:provider, role_name: "ROLE_CONSORTIUM_ORGANIZATION", name: "University of Virginia", symbol: "UVA", consortium: consortium) } + let(:consortium) do + create( + :provider, + role_name: "ROLE_CONSORTIUM", + name: "Virtual Library of Virginia", + symbol: "VIVA", + ) + end + let!(:provider) do + create( + :provider, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + name: "University of Virginia", + symbol: "UVA", + consortium: consortium, + ) + end describe "GET /export/organizations", elasticsearch: true do before do @@ -19,25 +41,37 @@ expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(3) - expect(csv[0]).to start_with("Name,fabricaAccountId,Parent Organization,Is Active") + expect(csv[0]).to start_with( + "Name,fabricaAccountId,Parent Organization,Is Active", + ) expect(csv[1]).to start_with("Virtual Library of Virginia,VIVA,,true") expect(csv[2]).to start_with("University of Virginia,UVA,VIVA,true") end it "returns organizations from date", vcr: false do - get "/export/organizations?from-date=#{Date.today}", params: nil, session: 
admin_headers + get "/export/organizations?from-date=#{Date.today}", + params: nil, session: admin_headers expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(3) - expect(csv[0]).to start_with("Name,fabricaAccountId,Parent Organization,Is Active") + expect(csv[0]).to start_with( + "Name,fabricaAccountId,Parent Organization,Is Active", + ) expect(csv[1]).to start_with("Virtual Library of Virginia,VIVA,,true") expect(csv[2]).to start_with("University of Virginia,UVA,VIVA,true") end end describe "GET /export/repositories", elasticsearch: true do - let(:client) { create(:client, provider: provider, symbol: "UVA.LIBRARY", name: "University of Virginia Library") } + let(:client) do + create( + :client, + provider: provider, + symbol: "UVA.LIBRARY", + name: "University of Virginia Library", + ) + end let!(:dois) { create_list(:doi, 3, client: client, aasm_state: "findable") } before do @@ -52,20 +86,29 @@ expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(2) - expect(csv[0]).to start_with("Repository Name,Repository ID,Organization,isActive") - expect(csv[1]).to start_with("University of Virginia Library,UVA.LIBRARY,UVA,true") + expect(csv[0]).to start_with( + "Repository Name,Repository ID,Organization,isActive", + ) + expect(csv[1]).to start_with( + "University of Virginia Library,UVA.LIBRARY,UVA,true", + ) dois_total = csv[1].strip.split(",").last.to_i expect(dois_total).to eq(3) end it "returns repositories from date", vcr: false do - get "/export/repositories?from-date=#{Date.today}", params: nil, session: admin_headers + get "/export/repositories?from-date=#{Date.today}", + params: nil, session: admin_headers expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(2) - expect(csv[0]).to start_with("Repository Name,Repository ID,Organization,isActive") - expect(csv[1]).to start_with("University of Virginia Library,UVA.LIBRARY,UVA,true") + 
expect(csv[0]).to start_with( + "Repository Name,Repository ID,Organization,isActive", + ) + expect(csv[1]).to start_with( + "University of Virginia Library,UVA.LIBRARY,UVA,true", + ) dois_total = csv[1].strip.split(",").last.to_i expect(dois_total).to eq(3) end @@ -83,32 +126,69 @@ expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(9) - expect(csv[0]).to eq("fabricaAccountId,fabricaId,email,firstName,lastName,type\n") - expect(csv[1]).to start_with("VIVA,VIVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical") - expect(csv[2]).to start_with("VIVA,VIVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService") - expect(csv[3]).to start_with("VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting") - expect(csv[4]).to start_with("VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") - expect(csv[5]).to start_with("UVA,UVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical") - expect(csv[6]).to start_with("UVA,UVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService") - expect(csv[7]).to start_with("UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting") - expect(csv[8]).to start_with("UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") + expect(csv[0]).to eq( + "fabricaAccountId,fabricaId,email,firstName,lastName,type\n", + ) + expect(csv[1]).to start_with( + "VIVA,VIVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical", + ) + expect(csv[2]).to start_with( + "VIVA,VIVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService", + ) + expect(csv[3]).to start_with( + "VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) + expect(csv[4]).to start_with( + "VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) + 
expect(csv[5]).to start_with( + "UVA,UVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical", + ) + expect(csv[6]).to start_with( + "UVA,UVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService", + ) + expect(csv[7]).to start_with( + "UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) + expect(csv[8]).to start_with( + "UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) end it "returns all contacts from date", vcr: false do - get "/export/contacts?from-date=#{Date.today}", params: nil, session: admin_headers + get "/export/contacts?from-date=#{Date.today}", + params: nil, session: admin_headers expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(9) - expect(csv[0]).to eq("fabricaAccountId,fabricaId,email,firstName,lastName,type\n") - expect(csv[1]).to start_with("VIVA,VIVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical") - expect(csv[2]).to start_with("VIVA,VIVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService") - expect(csv[3]).to start_with("VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting") - expect(csv[4]).to start_with("VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") - expect(csv[5]).to start_with("UVA,UVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical") - expect(csv[6]).to start_with("UVA,UVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService") - expect(csv[7]).to start_with("UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting") - expect(csv[8]).to start_with("UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") + expect(csv[0]).to eq( + "fabricaAccountId,fabricaId,email,firstName,lastName,type\n", + ) + expect(csv[1]).to start_with( + 
"VIVA,VIVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical", + ) + expect(csv[2]).to start_with( + "VIVA,VIVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService", + ) + expect(csv[3]).to start_with( + "VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) + expect(csv[4]).to start_with( + "VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) + expect(csv[5]).to start_with( + "UVA,UVA-kristian@example.com,kristian@example.com,Kristian,Garza,technical;secondaryTechnical", + ) + expect(csv[6]).to start_with( + "UVA,UVA-martin@example.com,martin@example.com,Martin,Fenner,service;secondaryService", + ) + expect(csv[7]).to start_with( + "UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) + expect(csv[8]).to start_with( + "UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) end it "returns voting contacts", vcr: false do @@ -117,9 +197,15 @@ expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(3) - expect(csv[0]).to eq("fabricaAccountId,fabricaId,email,firstName,lastName,type\n") - expect(csv[1]).to start_with("VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting") - expect(csv[2]).to start_with("UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting") + expect(csv[0]).to eq( + "fabricaAccountId,fabricaId,email,firstName,lastName,type\n", + ) + expect(csv[1]).to start_with( + "VIVA,VIVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) + expect(csv[2]).to start_with( + "UVA,UVA-robin@example.com,robin@example.com,Robin,Dasler,voting", + ) end it "returns billing contacts", vcr: false do @@ -128,9 +214,15 @@ expect(last_response.status).to eq(200) csv = last_response.body.lines expect(csv.length).to eq(3) - expect(csv[0]).to eq("fabricaAccountId,fabricaId,email,firstName,lastName,type\n") - expect(csv[1]).to 
start_with("VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") - expect(csv[2]).to start_with("UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling") + expect(csv[0]).to eq( + "fabricaAccountId,fabricaId,email,firstName,lastName,type\n", + ) + expect(csv[1]).to start_with( + "VIVA,VIVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) + expect(csv[2]).to start_with( + "UVA,UVA-trisha@example.com,trisha@example.com,Trisha,Cruse,billing;secondaryBilling", + ) end end end diff --git a/spec/requests/heartbeat_spec.rb b/spec/requests/heartbeat_spec.rb index 56094a87c..63657ccc7 100644 --- a/spec/requests/heartbeat_spec.rb +++ b/spec/requests/heartbeat_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe "/heartbeat", type: :request do diff --git a/spec/requests/index_spec.rb b/spec/requests/index_spec.rb index 5cd26f0ac..ae2b667fe 100644 --- a/spec/requests/index_spec.rb +++ b/spec/requests/index_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe IndexController, type: :request do @@ -6,12 +8,17 @@ describe "content_negotation" do context "application/vnd.jats+xml" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.jats+xml" } + get "/#{doi.doi}", + params: nil, + session: { "HTTP_ACCEPT" => "application/vnd.jats+xml" } expect(last_response.status).to eq(200) - jats = Maremma.from_xml(last_response.body).fetch("element_citation", {}) + jats = + Maremma.from_xml(last_response.body).fetch("element_citation", {}) expect(jats.dig("publication_type")).to eq("data") - expect(jats.dig("data_title")).to eq("Data from: A new malaria agent in African hominids.") + expect(jats.dig("data_title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end @@ -20,21 +27,30 @@ get "/application/vnd.jats+xml/#{doi.doi}" 
expect(last_response.status).to eq(200) - jats = Maremma.from_xml(last_response.body).fetch("element_citation", {}) + jats = + Maremma.from_xml(last_response.body).fetch("element_citation", {}) expect(jats.dig("publication_type")).to eq("data") - expect(jats.dig("data_title")).to eq("Data from: A new malaria agent in African hominids.") + expect(jats.dig("data_title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end context "application/vnd.datacite.datacite+xml" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.datacite.datacite+xml" } + get "/#{doi.doi}", + params: nil, + session: { + "HTTP_ACCEPT" => "application/vnd.datacite.datacite+xml", + } expect(last_response.status).to eq(200) data = Maremma.from_xml(last_response.body).to_h.fetch("resource", {}) expect(data.dig("xmlns")).to eq("http://datacite.org/schema/kernel-4") expect(data.dig("publisher")).to eq("Dryad Digital Repository") - expect(data.dig("titles", "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(data.dig("titles", "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end @@ -46,7 +62,9 @@ data = Maremma.from_xml(last_response.body).to_h.fetch("resource", {}) expect(data.dig("xmlns")).to eq("http://datacite.org/schema/kernel-4") expect(data.dig("publisher")).to eq("Dryad Digital Repository") - expect(data.dig("titles", "title")).to eq("Data from: A new malaria agent in African hominids.") + expect(data.dig("titles", "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) end end @@ -67,16 +85,31 @@ context "application/vnd.datacite.datacite+xml not found" do it "returns error message" do - get "/xxx", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.datacite.datacite+xml" } + get "/xxx", + params: nil, + session: { + "HTTP_ACCEPT" => "application/vnd.datacite.datacite+xml", + } expect(last_response.status).to eq(404) - 
expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end context "application/vnd.datacite.datacite+json" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.datacite.datacite+json" } + get "/#{doi.doi}", + params: nil, + session: { + "HTTP_ACCEPT" => "application/vnd.datacite.datacite+json", + } expect(last_response.status).to eq(200) expect(json["doi"]).to eq(doi.doi) @@ -94,7 +127,11 @@ context "application/vnd.crosscite.crosscite+json" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.crosscite.crosscite+json" } + get "/#{doi.doi}", + params: nil, + session: { + "HTTP_ACCEPT" => "application/vnd.crosscite.crosscite+json", + } expect(last_response.status).to eq(200) expect(json["doi"]).to eq(doi.doi) @@ -112,7 +149,9 @@ context "application/vnd.schemaorg.ld+json" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.schemaorg.ld+json" } + get "/#{doi.doi}", + params: nil, + session: { "HTTP_ACCEPT" => "application/vnd.schemaorg.ld+json" } expect(last_response.status).to eq(200) expect(json["@type"]).to eq("Dataset") @@ -130,7 +169,8 @@ context "application/ld+json" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/ld+json" } + get "/#{doi.doi}", + params: nil, session: { "HTTP_ACCEPT" => "application/ld+json" } expect(last_response.status).to eq(200) expect(json["@type"]).to eq("Dataset") @@ -148,7 +188,11 @@ context "application/vnd.citationstyles.csl+json" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.citationstyles.csl+json" } + get "/#{doi.doi}", + params: nil, + session: { + 
"HTTP_ACCEPT" => "application/vnd.citationstyles.csl+json", + } expect(last_response.status).to eq(200) expect(json["type"]).to eq("dataset") @@ -166,7 +210,9 @@ context "application/x-research-info-systems" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/x-research-info-systems" } + get "/#{doi.doi}", + params: nil, + session: { "HTTP_ACCEPT" => "application/x-research-info-systems" } expect(last_response.status).to eq(200) expect(last_response.body).to start_with("TY - DATA") @@ -184,10 +230,13 @@ context "application/x-bibtex" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/x-bibtex" } + get "/#{doi.doi}", + params: nil, session: { "HTTP_ACCEPT" => "application/x-bibtex" } expect(last_response.status).to eq(200) - expect(last_response.body).to start_with("@misc{https://doi.org/#{doi.doi.downcase}") + expect(last_response.body).to start_with( + "@misc{https://doi.org/#{doi.doi.downcase}", + ) end end @@ -196,7 +245,9 @@ get "/application/x-bibtex/#{doi.doi}" expect(last_response.status).to eq(200) - expect(last_response.body).to start_with("@misc{https://doi.org/#{doi.doi.downcase}") + expect(last_response.body).to start_with( + "@misc{https://doi.org/#{doi.doi.downcase}", + ) end end @@ -221,7 +272,8 @@ context "text/x-bibliography" do context "default style" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "text/x-bibliography" } + get "/#{doi.doi}", + params: nil, session: { "HTTP_ACCEPT" => "text/x-bibliography" } expect(last_response.status).to eq(200) expect(last_response.body).to start_with("Ollomo, B.") @@ -229,14 +281,20 @@ end it "header with style" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "text/x-bibliography; style=ieee" } + get "/#{doi.doi}", + params: nil, + session: { "HTTP_ACCEPT" => "text/x-bibliography; style=ieee" } expect(last_response.status).to eq(200) 
expect(last_response.body).to start_with("B. Ollomo") end it "header with style and locale" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "text/x-bibliography; style=vancouver; locale=de" } + get "/#{doi.doi}", + params: nil, + session: { + "HTTP_ACCEPT" => "text/x-bibliography; style=vancouver; locale=de", + } expect(last_response.status).to eq(200) expect(last_response.body).to start_with("Ollomo B") @@ -263,7 +321,9 @@ context "unknown content type" do it "returns the Doi" do - get "/#{doi.doi}", params: nil, session: { "HTTP_ACCEPT" => "application/vnd.ms-excel" } + get "/#{doi.doi}", + params: nil, + session: { "HTTP_ACCEPT" => "application/vnd.ms-excel" } expect(last_response.status).to eq(303) expect(last_response.headers["Location"]).to eq(doi.url) diff --git a/spec/requests/media_spec.rb b/spec/requests/media_spec.rb index f0eff7e9d..611083132 100644 --- a/spec/requests/media_spec.rb +++ b/spec/requests/media_spec.rb @@ -1,13 +1,27 @@ +# frozen_string_literal: true + require "rails_helper" -describe MediaController, type: :request, order: :defined, elasticsearch: true do +describe MediaController, + type: :request, order: :defined, elasticsearch: true do let(:provider) { create(:provider, symbol: "ADMIN") } let(:client) { create(:client, provider: provider) } let(:datacite_doi) { create(:doi, client: client, type: "DataciteDoi") } - let!(:medias) { create_list(:media, 5, doi: datacite_doi) } + let!(:medias) { create_list(:media, 5, doi: datacite_doi) } let!(:media) { create(:media, doi: datacite_doi) } - let(:bearer) { User.generate_token(role_id: "client_admin", provider_id: provider.symbol.downcase, client_id: client.symbol.downcase) } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:bearer) do + User.generate_token( + role_id: "client_admin", + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + ) + end + let(:headers) do + { + 
"HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end let(:media_type) { "application/xml" } let(:url) { "https://example.org" } @@ -28,7 +42,14 @@ get "/dois/xxx/media", params: nil, session: headers expect(json).not_to be_empty - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end it "returns status code 404" do @@ -41,14 +62,16 @@ describe "GET /dois/DOI/media/:id" do context "when the record exists" do it "returns the media" do - get "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: nil, session: headers expect(json).not_to be_empty expect(json.dig("data", "id")).to eq(media.uid) end it "returns status code 200" do - get "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: nil, session: headers expect(last_response.status).to eq(200) end @@ -56,15 +79,20 @@ context "when the record does not exist" do it "returns status code 404" do - get "/dois/#{datacite_doi.doi}/media/xxxx", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/media/xxxx", + params: nil, session: headers expect(last_response.status).to eq(404) end it "returns a not found message" do - get "/dois/#{datacite_doi.doi}/media/xxxx", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/media/xxxx", + params: nil, session: headers - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end @@ -76,23 +104,22 @@ { "data" => { "type" => "media", - "attributes" => 
{ - "mediaType" => media_type, - "url" => url, - }, + "attributes" => { "mediaType" => media_type, "url" => url }, }, } end it "creates a media record" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers expect(json.dig("data", "attributes", "mediaType")).to eq(media_type) expect(json.dig("data", "attributes", "url")).to eq(url) end it "returns status code 201" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers expect(last_response.status).to eq(201) end @@ -103,22 +130,21 @@ { "data" => { "type" => "media", - "attributes" => { - "mediaType" => nil, - "url" => url, - }, + "attributes" => { "mediaType" => nil, "url" => url }, }, } end it "returns status code 201" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers expect(last_response.status).to eq(201) end it "creates a media record" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers expect(json.dig("data", "attributes", "url")).to eq(url) end @@ -130,16 +156,10 @@ { "data" => { "type" => "media", - "attributes" => { - "mediaType" => media_type, - "url" => url, - }, + "attributes" => { "mediaType" => media_type, "url" => url }, "relationships" => { "doi" => { - "data" => { - "type" => "dois", - "id" => datacite_doi.doi, - }, + "data" => { "type" => "dois", "id" => datacite_doi.doi }, }, }, }, @@ -147,15 +167,19 @@ end it "returns status code 422" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers 
expect(last_response.status).to eq(422) end it "returns a validation failure message" do - post "/dois/#{datacite_doi.doi}/media", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/media", + params: valid_attributes, session: headers - expect(json["errors"]).to eq([{ "source" => "media_type", "title" => "Is invalid" }]) + expect(json["errors"]).to eq( + [{ "source" => "media_type", "title" => "Is invalid" }], + ) end end end @@ -166,16 +190,10 @@ { "data" => { "type" => "media", - "attributes" => { - "mediaType" => media_type, - "url" => url, - }, + "attributes" => { "mediaType" => media_type, "url" => url }, "relationships" => { "doi" => { - "data" => { - "type" => "dois", - "id" => datacite_doi.doi, - }, + "data" => { "type" => "dois", "id" => datacite_doi.doi }, }, }, }, @@ -183,7 +201,8 @@ end it "updates the record" do - patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: valid_attributes, session: headers + patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: valid_attributes, session: headers expect(json.dig("data", "attributes", "mediaType")).to eq(media_type) expect(json.dig("data", "attributes", "url")).to eq(url) @@ -191,7 +210,8 @@ end it "returns status code 200" do - patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: valid_attributes, session: headers + patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: valid_attributes, session: headers expect(last_response.status).to eq(200) end @@ -203,16 +223,10 @@ { "data" => { "type" => "media", - "attributes" => { - "mediaType" => media_type, - "url" => url, - }, + "attributes" => { "mediaType" => media_type, "url" => url }, "relationships" => { "doi" => { - "data" => { - "type" => "dois", - "id" => datacite_doi.doi, - }, + "data" => { "type" => "dois", "id" => datacite_doi.doi }, }, }, }, @@ -220,15 +234,19 @@ end it "returns status code 422" do - patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: params, session: headers + 
patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: params, session: headers expect(last_response.status).to eq(422) end it "returns a validation failure message" do - patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: params, session: headers + patch "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: params, session: headers - expect(json["errors"].first).to eq("source" => "url", "title" => "Is invalid") + expect(json["errors"].first).to eq( + "source" => "url", "title" => "Is invalid", + ) end end end @@ -236,7 +254,8 @@ describe "DELETE /dois/DOI/media/:id" do context "when the resources does exist" do it "returns status code 204" do - delete "/dois/#{datacite_doi.doi}/media/#{media.uid}", params: nil, session: headers + delete "/dois/#{datacite_doi.doi}/media/#{media.uid}", + params: nil, session: headers expect(last_response.status).to eq(204) end @@ -244,15 +263,20 @@ context "when the resources doesnt exist" do it "returns status code 404" do - delete "/dois/#{datacite_doi.doi}/media/xxx", params: nil, session: headers + delete "/dois/#{datacite_doi.doi}/media/xxx", + params: nil, session: headers expect(last_response.status).to eq(404) end it "returns a validation failure message" do - delete "/dois/#{datacite_doi.doi}/media/xxx", params: nil, session: headers + delete "/dois/#{datacite_doi.doi}/media/xxx", + params: nil, session: headers - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end diff --git a/spec/requests/members_spec.rb b/spec/requests/members_spec.rb index c597f5eba..30b73c419 100644 --- a/spec/requests/members_spec.rb +++ b/spec/requests/members_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe MembersController, type: :request do @@ -51,7 +53,10 @@ get "/members/xxx" 
expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end diff --git a/spec/requests/metadata_spec.rb b/spec/requests/metadata_spec.rb index f97deb192..6c8c68b3b 100644 --- a/spec/requests/metadata_spec.rb +++ b/spec/requests/metadata_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe MetadataController, type: :request do @@ -5,10 +7,21 @@ let(:client) { create(:client, provider: provider) } let(:datacite_doi) { create(:doi, client: client, type: "DataciteDoi") } let(:xml) { file_fixture("datacite.xml").read } - let!(:metadatas) { create_list(:metadata, 5, doi: datacite_doi, xml: xml) } + let!(:metadatas) { create_list(:metadata, 5, doi: datacite_doi, xml: xml) } let!(:metadata) { create(:metadata, doi: datacite_doi, xml: xml) } - let(:bearer) { User.generate_token(role_id: "client_admin", provider_id: provider.symbol.downcase, client_id: client.symbol.downcase) } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:bearer) do + User.generate_token( + role_id: "client_admin", + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + ) + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end # describe 'GET /dois/DOI/metadata' do # it 'returns Metadata' do @@ -28,14 +41,16 @@ describe "GET /dois/DOI/metadata/:id" do context "when the record exists" do it "returns the Metadata" do - get "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", + params: nil, session: headers expect(json).not_to be_empty expect(json.dig("data", "id")).to 
eq(metadata.uid) end it "returns status code 200" do - get "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", + params: nil, session: headers expect(last_response.status).to eq(200) end @@ -43,15 +58,24 @@ context "when the record does not exist" do it "returns status code 404" do - get "/dois/#{datacite_doi.doi}/metadata/xxxx", params: nil, session: headers + get "/dois/#{datacite_doi.doi}/metadata/xxxx", + params: nil, session: headers expect(last_response.status).to eq(404) end it "returns a not found message" do - get "/dois/#{datacite_doi.doi}/metadata/xxxx", params: nil, session: headers - - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + get "/dois/#{datacite_doi.doi}/metadata/xxxx", + params: nil, session: headers + + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end end @@ -62,46 +86,48 @@ { "data" => { "type" => "metadata", - "attributes" => { - "xml" => Base64.strict_encode64(xml), - }, + "attributes" => { "xml" => Base64.strict_encode64(xml) }, }, } end it "creates a metadata record" do - post "/dois/#{datacite_doi.doi}/metadata", params: valid_attributes, session: headers - - expect(Base64.decode64(json.dig("data", "attributes", "xml"))).to eq(xml) - expect(json.dig("data", "attributes", "namespace")).to eq("http://datacite.org/schema/kernel-4") + post "/dois/#{datacite_doi.doi}/metadata", + params: valid_attributes, session: headers + + expect(Base64.decode64(json.dig("data", "attributes", "xml"))).to eq( + xml, + ) + expect(json.dig("data", "attributes", "namespace")).to eq( + "http://datacite.org/schema/kernel-4", + ) end it "returns status code 201" do - post "/dois/#{datacite_doi.doi}/metadata", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/metadata", + params: 
valid_attributes, session: headers expect(last_response.status).to eq(201) end end context "when the xml is missing" do - let(:not_valid_attributes) do - { - "data" => { - "type" => "metadata", - }, - } - end + let(:not_valid_attributes) { { "data" => { "type" => "metadata" } } } it "returns status code 422" do - post "/dois/#{datacite_doi.doi}/metadata", params: not_valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/metadata", + params: not_valid_attributes, session: headers expect(last_response.status).to eq(422) end it "returns a validation failure message" do - post "/dois/#{datacite_doi.doi}/metadata", params: not_valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/metadata", + params: not_valid_attributes, session: headers - expect(json["errors"]).to eq([{ "source" => "xml", "title" => "Can't be blank" }]) + expect(json["errors"]).to eq( + [{ "source" => "xml", "title" => "Can't be blank" }], + ) end end @@ -111,15 +137,10 @@ { "data" => { "type" => "metadata", - "attributes" => { - "xml" => Base64.strict_encode64(xml), - }, + "attributes" => { "xml" => Base64.strict_encode64(xml) }, "relationships" => { "doi" => { - "data" => { - "type" => "dois", - "id" => datacite_doi.doi, - }, + "data" => { "type" => "dois", "id" => datacite_doi.doi }, }, }, }, @@ -127,7 +148,8 @@ end it "returns status code 201" do - post "/dois/#{datacite_doi.doi}/metadata", params: valid_attributes, session: headers + post "/dois/#{datacite_doi.doi}/metadata", + params: valid_attributes, session: headers expect(last_response.status).to eq(201) end @@ -175,7 +197,8 @@ describe "DELETE /dois/DOI/metadata/:id" do context "when the resources does exist" do it "returns status code 204" do - delete "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", params: nil, session: headers + delete "/dois/#{datacite_doi.doi}/metadata/#{metadata.uid}", + params: nil, session: headers expect(last_response.status).to eq(204) end @@ -183,15 +206,24 @@ context "when the 
resources doesnt exist" do it "returns status code 404" do - delete "/dois/#{datacite_doi.doi}/metadata/xxx", params: nil, session: headers + delete "/dois/#{datacite_doi.doi}/metadata/xxx", + params: nil, session: headers expect(last_response.status).to eq(404) end it "returns a validation failure message" do - delete "/dois/#{datacite_doi.doi}/metadata/xxx", params: nil, session: headers - - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + delete "/dois/#{datacite_doi.doi}/metadata/xxx", + params: nil, session: headers + + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end end diff --git a/spec/requests/old_events_spec.rb b/spec/requests/old_events_spec.rb index 9814824a3..9a22256fc 100644 --- a/spec/requests/old_events_spec.rb +++ b/spec/requests/old_events_spec.rb @@ -4,11 +4,18 @@ describe EventsController, type: :request, elasticsearch: true, vcr: true do let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) } + let(:client) do + create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) + end before(:each) do - allow(Time).to receive(:now).and_return(Time.mktime(2015, 4, 8)) - allow(Time.zone).to receive(:now).and_return(Time.mktime(2015, 4, 8)) + allow(Time).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) + allow(Time.zone).to receive(:now).and_return(Time.mktime(2_015, 4, 8)) end let(:event) { build(:event) } @@ -16,7 +23,8 @@ # Successful response from creating via the API. 
let(:success) do - { "id" => event.uuid, + { + "id" => event.uuid, "type" => "events", "attributes" => { "subj-id" => "http://www.citeulike.org/user/dbogartoit", @@ -28,42 +36,60 @@ "total" => 1, "license" => "https://creativecommons.org/publicdomain/zero/1.0/", "occurred-at" => "2015-04-08T00:00:00.000Z", - "subj" => { "@id" => "http://www.citeulike.org/user/dbogartoit", - "@type" => "CreativeWork", - "author" => [{ "givenName" => "dbogartoit" }], - "name" => "CiteULike bookmarks for user dbogartoit", - "publisher" => { "@type" => "Organization", "name" => "CiteULike" }, - "periodical" => { "@type" => "Periodical", "@id" => "https://doi.org/10.13039/100011326", "name" => "CiteULike", "issn" => "9812-847X" }, - "funder" => { "@type" => "Organization", "@id" => "https://doi.org/10.13039/100011326", "name" => "CiteULike" }, - "version" => "1.0", - "proxy-identifiers" => ["10.13039/100011326"], - "date-published" => "2006-06-13T16:14:19Z", - "date-modified" => "2006-06-13T16:14:19Z", - "url" => "http://www.citeulike.org/user/dbogartoit" }, + "subj" => { + "@id" => "http://www.citeulike.org/user/dbogartoit", + "@type" => "CreativeWork", + "author" => [{ "givenName" => "dbogartoit" }], + "name" => "CiteULike bookmarks for user dbogartoit", + "publisher" => { "@type" => "Organization", "name" => "CiteULike" }, + "periodical" => { + "@type" => "Periodical", + "@id" => "https://doi.org/10.13039/100011326", + "name" => "CiteULike", + "issn" => "9812-847X", + }, + "funder" => { + "@type" => "Organization", + "@id" => "https://doi.org/10.13039/100011326", + "name" => "CiteULike", + }, + "version" => "1.0", + "proxy-identifiers" => %w[10.13039/100011326], + "date-published" => "2006-06-13T16:14:19Z", + "date-modified" => "2006-06-13T16:14:19Z", + "url" => "http://www.citeulike.org/user/dbogartoit", + }, "obj" => {}, - } } + }, + } end let(:token) { User.generate_token(role_id: "staff_admin") } let(:uuid) { SecureRandom.uuid } let(:headers) do - { "HTTP_ACCEPT" => 
"application/vnd.api+json", - "HTTP_AUTHORIZATION" => "Bearer #{token}" } + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer #{token}", + } end context "create" do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subj-id" => event.subj_id, - "subj" => event.subj, - "obj-id" => event.obj_id, - "relation-type-id" => event.relation_type_id, - "source-id" => event.source_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subj-id" => event.subj_id, + "subj" => event.subj, + "obj-id" => event.obj_id, + "relation-type-id" => event.relation_type_id, + "source-id" => event.source_id, + "source-token" => event.source_token, + }, + }, + } end context "as admin user" do @@ -78,17 +104,23 @@ end context "with very long url" do - let(:url) { "http://navigator.eumetsat.int/soapservices/cswstartup?service=csw&version=2.0.2&request=getrecordbyid&outputschema=http%3A%2F%2Fwww.isotc211.org%2F2005%2Fgmd&id=eo%3Aeum%3Adat%3Amult%3Arac-m11-iasia" } + let(:url) do + "http://navigator.eumetsat.int/soapservices/cswstartup?service=csw&version=2.0.2&request=getrecordbyid&outputschema=http%3A%2F%2Fwww.isotc211.org%2F2005%2Fgmd&id=eo%3Aeum%3Adat%3Amult%3Arac-m11-iasia" + end let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => event.subj_id, - "subj" => event.subj, - "obj-id" => url, - "relation-type-id" => event.relation_type_id, - "source-id" => "datacite-url", - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => event.subj_id, + "subj" => event.subj, + "obj-id" => url, + "relation-type-id" => event.relation_type_id, + "source-id" => "datacite-url", + "source-token" => event.source_token, + }, + }, + } end it "JSON" do @@ -108,7 +140,14 @@ post uri, params: params, session: headers 
expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -120,72 +159,99 @@ post uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "without source-token" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "subj-id" => event.subj_id, - "source-id" => event.source_id, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "subj-id" => event.subj_id, + "source-id" => event.source_id, + }, + }, + } end it "JSON" do post uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source token can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source token can't be blank" }], + ) expect(json["data"]).to be_nil end end context "without source-id" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "subj-id" => event.subj_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "subj-id" => event.subj_id, + "source-token" => event.source_token, + }, + }, + } end it "JSON" do post uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source can't be blank" }]) + 
expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source can't be blank" }], + ) expect(json["data"]).to be_blank end end context "without subj-id" do let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "source-id" => event.source_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "uuid" => uuid, + "source-id" => event.source_id, + "source-token" => event.source_token, + }, + }, + } end it "JSON" do post uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Subj can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Subj can't be blank" }], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -199,11 +265,12 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "attributes" => { - "uuid" => uuid, - "source-token" => "123", - } } } + { + "event" => { + "type" => "events", + "attributes" => { "uuid" => uuid, "source-token" => "123" }, + }, + } end it "JSON" do @@ -216,7 +283,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do post uri, params: params, session: headers @@ -245,16 +317,32 @@ context "with registrant information" do 
let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.18713/jimis-170117-1-2", - "subj" => { "@id": "https://doi.org/10.18713/jimis-170117-1-2", "@type": "ScholarlyArticle", "datePublished": "2017", "proxyIdentifiers": [], "registrantId": "datacite.inist.umr7300" }, - "obj" => { "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", "@type": "ScholarlyArticle", "datePublished": "2013-09", "proxyIdentifiers": ["13646826"], "registrantId": "datacite.crossref.citations" }, - "obj-id" => "https://doi.org/10.1016/j.jastp.2013.05.001", - "relation-type-id" => "references", - "source-id" => "datacite-crossref", - "source-token" => "source-token", - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.18713/jimis-170117-1-2", + "subj" => { + "@id": "https://doi.org/10.18713/jimis-170117-1-2", + "@type": "ScholarlyArticle", + "datePublished": "2017", + "proxyIdentifiers": [], + "registrantId": "datacite.inist.umr7300", + }, + "obj" => { + "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", + "@type": "ScholarlyArticle", + "datePublished": "2013-09", + "proxyIdentifiers": %w[13646826], + "registrantId": "datacite.crossref.citations", + }, + "obj-id" => "https://doi.org/10.1016/j.jastp.2013.05.001", + "relation-type-id" => "references", + "source-id" => "datacite-crossref", + "source-token" => "source-token", + }, + }, + } end it "has registrant aggregation" do @@ -263,30 +351,50 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).not_to eq(event.uuid) - expect(json.dig("data", "attributes", "obj-id")).to eq("https://doi.org/10.1016/j.jastp.2013.05.001") + expect(json.dig("data", "attributes", "obj-id")).to eq( + "https://doi.org/10.1016/j.jastp.2013.05.001", + ) Event.import sleep 2 get uri, params: nil, session: headers expect(json.dig("meta", "registrants", 0, "count")).to eq(1) - 
expect(json.dig("meta", "registrants", 0, "id")).to eq("datacite.crossref.citations") + expect(json.dig("meta", "registrants", 0, "id")).to eq( + "datacite.crossref.citations", + ) end end context "with nested attributes" do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.18713/jimis-170117-1-2", - "subj" => { "@id": "https://doi.org/10.18713/jimis-170117-1-2", "@type": "ScholarlyArticle", "datePublished": "2017", "proxyIdentifiers": [], "registrantId": "datacite.inist.umr7300" }, - "obj" => { "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", "@type": "ScholarlyArticle", "datePublished": "2013-09", "proxyIdentifiers": ["13646826"], "registrantId": "datacite.crossref.citations" }, - "obj-id" => "https://doi.org/10.1016/j.jastp.2013.05.001", - "relation-type-id" => "references", - "source-id" => "datacite-crossref", - "source-token" => "source-token", - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.18713/jimis-170117-1-2", + "subj" => { + "@id": "https://doi.org/10.18713/jimis-170117-1-2", + "@type": "ScholarlyArticle", + "datePublished": "2017", + "proxyIdentifiers": [], + "registrantId": "datacite.inist.umr7300", + }, + "obj" => { + "@id": "https://doi.org/10.1016/j.jastp.2013.05.001", + "@type": "ScholarlyArticle", + "datePublished": "2013-09", + "proxyIdentifiers": %w[13646826], + "registrantId": "datacite.crossref.citations", + }, + "obj-id" => "https://doi.org/10.1016/j.jastp.2013.05.001", + "relation-type-id" => "references", + "source-id" => "datacite-crossref", + "source-token" => "source-token", + }, + }, + } end it "are correctly stored" do @@ -304,13 +412,17 @@ context "create crossref doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.7554/elife.01567", - "source-id" => "crossref-import", - "relation-type-id" => nil, 
- "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.7554/elife.01567", + "source-id" => "crossref-import", + "relation-type-id" => nil, + "source-token" => event.source_token, + }, + }, + } end it "registered" do @@ -319,20 +431,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.7554/elife.01567") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.7554/elife.01567", + ) end end context "create crossref doi not found", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.3389/fmicb.2019.01425", - "source-id" => "crossref-import", - "relation-type-id" => nil, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.3389/fmicb.2019.01425", + "source-id" => "crossref-import", + "relation-type-id" => nil, + "source-token" => event.source_token, + }, + }, + } end it "not registered" do @@ -341,20 +459,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.3389/fmicb.2019.01425") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.3389/fmicb.2019.01425", + ) end end context "create medra doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.3280/ecag2018-001005", - "source-id" => "medra-import", - "relation-type-id" => nil, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => 
"https://doi.org/10.3280/ecag2018-001005", + "source-id" => "medra-import", + "relation-type-id" => nil, + "source-token" => event.source_token, + }, + }, + } end it "registered" do @@ -363,20 +487,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.3280/ecag2018-001005") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.3280/ecag2018-001005", + ) end end context "create kisti doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.5012/bkcs.2013.34.10.2889", - "source-id" => "kisti-import", - "relation-type-id" => nil, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.5012/bkcs.2013.34.10.2889", + "source-id" => "kisti-import", + "relation-type-id" => nil, + "source-token" => event.source_token, + }, + }, + } end it "registered" do @@ -385,20 +515,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.5012/bkcs.2013.34.10.2889") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.5012/bkcs.2013.34.10.2889", + ) end end context "create jalc doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.1241/johokanri.39.979", - "source-id" => "jalc-import", - "relation-type-id" => nil, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.1241/johokanri.39.979", + "source-id" => "jalc-import", + "relation-type-id" => nil, + "source-token" => 
event.source_token, + }, + }, + } end it "registered" do @@ -407,20 +543,26 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.1241/johokanri.39.979") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.1241/johokanri.39.979", + ) end end context "create op doi", vcr: true do let(:uri) { "/events" } let(:params) do - { "data" => { "type" => "events", - "attributes" => { - "subj-id" => "https://doi.org/10.2903/j.efsa.2018.5239", - "source-id" => "op-import", - "relation-type-id" => nil, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "attributes" => { + "subj-id" => "https://doi.org/10.2903/j.efsa.2018.5239", + "source-id" => "op-import", + "relation-type-id" => nil, + "source-token" => event.source_token, + }, + }, + } end it "registered" do @@ -429,23 +571,29 @@ expect(last_response.status).to eq(201) expect(json["errors"]).to be_nil expect(json.dig("data", "id")).to be_present - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/10.2903/j.efsa.2018.5239") + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/10.2903/j.efsa.2018.5239", + ) end end context "upsert" do let(:uri) { "/events/#{event.uuid}" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subj-id" => event.subj_id, - "subj" => event.subj, - "obj-id" => event.obj_id, - "relation-type-id" => event.relation_type_id, - "source-id" => event.source_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subj-id" => event.subj_id, + "subj" => event.subj, + "obj-id" => event.obj_id, + "relation-type-id" => event.relation_type_id, + "source-id" => event.source_id, + "source-token" => event.source_token, + }, + }, + } end 
context "as admin user" do @@ -466,7 +614,14 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -478,72 +633,97 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "without source-token" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "subj-id" => event.subj_id, - "source-id" => event.source_id, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "subj-id" => event.subj_id, "source-id" => event.source_id + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Source token can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source token can't be blank" }], + ) expect(json["data"]).to be_nil end end context "without source-id" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "subj-id" => event.subj_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "subj-id" => event.subj_id, "source-token" => event.source_token + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ 
"status" => 422, "title" => "Source can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Source can't be blank" }], + ) expect(json["data"]).to be_blank end end context "without subj-id" do let(:params) do - { "data" => { "type" => "events", - "id" => uuid, - "attributes" => { - "source-id" => event.source_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => uuid, + "attributes" => { + "source-id" => event.source_id, + "source-token" => event.source_token, + }, + }, + } end it "JSON" do put uri, params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "status" => 422, "title" => "Subj can't be blank" }]) + expect(json["errors"]).to eq( + [{ "status" => 422, "title" => "Subj can't be blank" }], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -557,11 +737,13 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "id" => uuid, - "attributes" => { - "source-token" => "123", - } } } + { + "event" => { + "type" => "events", + "id" => uuid, + "attributes" => { "source-token" => "123" }, + }, + } end it "JSON" do @@ -574,7 +756,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do put uri, params: params, session: headers @@ -606,16 
+793,20 @@ let(:uri) { "/events/#{event.uuid}?include=dois" } let(:params) do - { "data" => { "type" => "events", - "id" => event.uuid, - "attributes" => { - "subj-id" => event.subj_id, - "subj" => event.subj, - "obj-id" => event.obj_id, - "relation-type-id" => event.relation_type_id, - "source-id" => event.source_id, - "source-token" => event.source_token, - } } } + { + "data" => { + "type" => "events", + "id" => event.uuid, + "attributes" => { + "subj-id" => event.subj_id, + "subj" => event.subj, + "obj-id" => event.obj_id, + "relation-type-id" => event.relation_type_id, + "source-id" => event.source_id, + "source-token" => event.source_token, + }, + }, + } end context "as admin user" do @@ -635,7 +826,14 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -647,15 +845,24 @@ put uri, params: params, session: headers expect(last_response.status).to eq(403) - expect(json["errors"]).to eq([{ "status" => "403", "title" => "You are not authorized to access this resource." 
}]) + expect(json["errors"]).to eq( + [ + { + "status" => "403", + "title" => "You are not authorized to access this resource.", + }, + ], + ) expect(json["data"]).to be_blank end end context "with wrong API token" do let(:headers) do - { "HTTP_ACCEPT" => "application/vnd.api+json", - "HTTP_AUTHORIZATION" => "Bearer 12345678" } + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer 12345678", + } end it "JSON" do @@ -669,11 +876,13 @@ context "with missing data param" do let(:params) do - { "event" => { "type" => "events", - "id" => uuid, - "attributes" => { - "source-token" => "123", - } } } + { + "event" => { + "type" => "events", + "id" => uuid, + "attributes" => { "source-token" => "123" }, + }, + } end it "JSON" do @@ -686,7 +895,12 @@ end context "with params in wrong format" do - let(:params) { { "data" => "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail" } } + let(:params) do + { + "data" => + "10.1371/journal.pone.0036790 2012-05-15 New Dromaeosaurids (Dinosauria: Theropoda) from the Lower Cretaceous of Utah, and the Evolution of the Dromaeosaurid Tail", + } + end it "JSON" do put uri, params: params, session: headers @@ -703,7 +917,14 @@ context "show" do let(:doi) { create(:doi, client: client, aasm_state: "findable") } let(:source_doi) { create(:doi, client: client, aasm_state: "findable") } - let!(:event) { create(:event_for_datacite_crossref, subj_id: "https://doi.org/#{doi.doi}", obj_id: "https://doi.org/#{source_doi.doi}", relation_type_id: "is-referenced-by") } + let!(:event) do + create( + :event_for_datacite_crossref, + subj_id: "https://doi.org/#{doi.doi}", + obj_id: "https://doi.org/#{source_doi.doi}", + relation_type_id: "is-referenced-by", + ) + end let(:uri) { "/events/#{event.uuid}?include=subj,obj" } @@ -718,9 +939,15 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(200) 
- expect(json.dig("data", "attributes", "relation-type-id")).to eq("is-referenced-by") - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/#{doi.doi.downcase}") - expect(json.dig("data", "attributes", "obj-id")).to eq("https://doi.org/#{source_doi.doi.downcase}") + expect(json.dig("data", "attributes", "relation-type-id")).to eq( + "is-referenced-by", + ) + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/#{doi.doi.downcase}", + ) + expect(json.dig("data", "attributes", "obj-id")).to eq( + "https://doi.org/#{source_doi.doi.downcase}", + ) end end @@ -731,9 +958,15 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "relation-type-id")).to eq("is-referenced-by") - expect(json.dig("data", "attributes", "subj-id")).to eq("https://doi.org/#{doi.doi.downcase}") - expect(json.dig("data", "attributes", "obj-id")).to eq("https://doi.org/#{source_doi.doi.downcase}") + expect(json.dig("data", "attributes", "relation-type-id")).to eq( + "is-referenced-by", + ) + expect(json.dig("data", "attributes", "subj-id")).to eq( + "https://doi.org/#{doi.doi.downcase}", + ) + expect(json.dig("data", "attributes", "obj-id")).to eq( + "https://doi.org/#{source_doi.doi.downcase}", + ) end end @@ -744,7 +977,14 @@ get uri, params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"]).to eq([{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + expect(json["errors"]).to eq( + [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) expect(json["data"]).to be_nil end end @@ -759,8 +999,11 @@ # Exclude the token header. 
let(:headers) do - { "HTTP_ACCEPT" => "application/json", - "HTTP_USER_AGENT" => "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" } + { + "HTTP_ACCEPT" => "application/json", + "HTTP_USER_AGENT" => + "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 5X Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.96 Mobile Safari/537.36 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)", + } end it "json" do diff --git a/spec/requests/prefixes_spec.rb b/spec/requests/prefixes_spec.rb index 78a3bed89..4d411ef45 100644 --- a/spec/requests/prefixes_spec.rb +++ b/spec/requests/prefixes_spec.rb @@ -1,10 +1,17 @@ +# frozen_string_literal: true + require "rails_helper" describe PrefixesController, type: :request, elasticsearch: true do let!(:prefixes) { create_list(:prefix, 10) } let(:bearer) { User.generate_token } let(:prefix_id) { prefixes.first.uid } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end describe "GET /prefixes" do before do @@ -54,7 +61,10 @@ get "/prefixes/10.1234", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end @@ -63,7 +73,10 @@ get "/prefixes/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for 
doesn't exist.", + ) end end end @@ -73,7 +86,9 @@ patch "/prefixes/#{prefix_id}", params: nil, session: headers expect(last_response.status).to eq(405) - expect(json.dig("errors")).to eq([{ "status" => "405", "title" => "Method not allowed" }]) + expect(json.dig("errors")).to eq( + [{ "status" => "405", "title" => "Method not allowed" }], + ) end end @@ -86,12 +101,7 @@ context "when the request is valid" do let!(:provider) { create(:provider) } let(:valid_attributes) do - { - "data" => { - "type" => "prefixes", - "id" => "10.17177", - }, - } + { "data" => { "type" => "prefixes", "id" => "10.17177" } } end it "returns status code 201" do @@ -106,10 +116,7 @@ let(:not_valid_attributes) do { "data" => { - "type" => "prefixes", - "attributes" => { - "uid" => "dsds10.33342", - }, + "type" => "prefixes", "attributes" => { "uid" => "dsds10.33342" } }, } end @@ -118,7 +125,9 @@ post "/prefixes", params: not_valid_attributes, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "uid", "title" => "Can't be blank") + expect(json["errors"].first).to eq( + "source" => "uid", "title" => "Can't be blank", + ) end end end diff --git a/spec/requests/provider_prefixes_spec.rb b/spec/requests/provider_prefixes_spec.rb index a178debae..2478fef2f 100644 --- a/spec/requests/provider_prefixes_spec.rb +++ b/spec/requests/provider_prefixes_spec.rb @@ -1,14 +1,30 @@ +# frozen_string_literal: true + require "rails_helper" describe ProviderPrefixesController, type: :request, elasticsearch: true do let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM") } - let(:provider) { create(:provider, consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION", password_input: "12345") } + let(:provider) do + create( + :provider, + consortium: consortium, + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + password_input: "12345", + ) + end let(:prefix) { create(:prefix) } - let!(:provider_prefixes) { create_list(:provider_prefix, 3, 
provider: provider) } + let!(:provider_prefixes) do + create_list(:provider_prefix, 3, provider: provider) + end let!(:provider_prefixes2) { create_list(:provider_prefix, 2) } let(:provider_prefix) { create(:provider_prefix) } let(:bearer) { User.generate_token(role_id: "staff_admin") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end before do ProviderPrefix.import @@ -19,19 +35,39 @@ describe "GET /provider-prefixes by consortium" do it "returns provider-prefixes" do - get "/provider-prefixes?consortium-id=#{consortium.symbol.downcase}", params: nil, session: headers + get "/provider-prefixes?consortium-id=#{consortium.symbol.downcase}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(3) - expect(json.dig("meta", "years")).to eq([{ "count" => 3, "id" => "2020", "title" => "2020" }]) - expect(json.dig("meta", "states")).to eq([{ "count" => 3, "id" => "without-repository", "title" => "Without Repository" }]) - expect(json.dig("meta", "providers")).to eq([{ "count" => 3, "id" => provider.symbol.downcase, "title" => "My provider" }]) + expect(json.dig("meta", "years")).to eq( + [{ "count" => 3, "id" => "2020", "title" => "2020" }], + ) + expect(json.dig("meta", "states")).to eq( + [ + { + "count" => 3, + "id" => "without-repository", + "title" => "Without Repository", + }, + ], + ) + expect(json.dig("meta", "providers")).to eq( + [ + { + "count" => 3, + "id" => provider.symbol.downcase, + "title" => "My provider", + }, + ], + ) end end describe "GET /provider-prefixes by provider" do it "returns provider-prefixes" do - get "/provider-prefixes?provider-id=#{provider.symbol.downcase}", params: nil, session: headers + get "/provider-prefixes?provider-id=#{provider.symbol.downcase}", + params: nil, session: headers 
expect(last_response.status).to eq(200) expect(json["data"].size).to eq(3) @@ -40,7 +76,8 @@ describe "GET /provider-prefixes by prefix" do it "returns provider-prefixes" do - get "/provider-prefixes?prefix-id=#{provider_prefixes.first.prefix_id}", params: nil, session: headers + get "/provider-prefixes?prefix-id=#{provider_prefixes.first.prefix_id}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) @@ -49,7 +86,10 @@ describe "GET /provider-prefixes by provider and prefix" do it "returns provider-prefixes" do - get "/provider-prefixes?provider-id=#{provider.symbol.downcase}&prefix-id=#{provider_prefixes.first.prefix_id}", params: nil, session: headers + get "/provider-prefixes?provider-id=#{ + provider.symbol.downcase + }&prefix-id=#{provider_prefixes.first.prefix_id}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) @@ -77,7 +117,8 @@ describe "GET /provider-prefixes/:uid" do context "when the record exists" do it "returns the provider-prefix" do - get "/provider-prefixes/#{provider_prefix.uid}", params: nil, session: headers + get "/provider-prefixes/#{provider_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data", "id")).to eq(provider_prefix.uid) @@ -89,17 +130,23 @@ get "/provider-prefixes/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "PATCH /provider-prefixes/:uid" do it "returns method not supported error" do - patch "/provider-prefixes/#{provider_prefix.uid}", params: nil, session: headers + patch "/provider-prefixes/#{provider_prefix.uid}", + params: nil, session: headers 
expect(last_response.status).to eq(405) - expect(json.dig("errors")).to eq([{ "status" => "405", "title" => "Method not allowed" }]) + expect(json.dig("errors")).to eq( + [{ "status" => "405", "title" => "Method not allowed" }], + ) end end @@ -111,17 +158,9 @@ "type" => "provider-prefixes", "relationships": { "provider": { - "data": { - "type": "provider", - "id": provider.symbol.downcase, - }, - }, - "prefix": { - "data": { - "type": "prefix", - "id": prefix.uid, - }, + "data": { "type": "provider", "id": provider.symbol.downcase }, }, + "prefix": { "data": { "type": "prefix", "id": prefix.uid } }, }, }, } @@ -138,18 +177,17 @@ context "when the request is invalid" do let!(:provider) { create(:provider) } let(:not_valid_attributes) do - { - "data" => { - "type" => "provider-prefixes", - }, - } + { "data" => { "type" => "provider-prefixes" } } end it "returns status code 422" do - post "/provider-prefixes", params: not_valid_attributes, session: headers + post "/provider-prefixes", + params: not_valid_attributes, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "provider", "title" => "Must exist") + expect(json["errors"].first).to eq( + "source" => "provider", "title" => "Must exist", + ) end end end @@ -163,7 +201,8 @@ end it "deletes the prefix" do - delete "/provider-prefixes/#{provider_prefix.uid}", params: nil, session: headers + delete "/provider-prefixes/#{provider_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(204) end end diff --git a/spec/requests/providers_spec.rb b/spec/requests/providers_spec.rb index 42ed3b1a4..77dbfeb09 100644 --- a/spec/requests/providers_spec.rb +++ b/spec/requests/providers_spec.rb @@ -1,23 +1,48 @@ +# frozen_string_literal: true + require "rails_helper" describe ProvidersController, type: :request, elasticsearch: true do let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM") } - let(:provider) { create(:provider, consortium: 
consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION") } - let(:token) { User.generate_token(role_id: "consortium_admin", provider_id: consortium.symbol.downcase) } + let(:provider) do + create( + :provider, + consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION", + ) + end + let(:token) do + User.generate_token( + role_id: "consortium_admin", provider_id: consortium.symbol.downcase, + ) + end let(:admin_token) { User.generate_token } let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "BL", - "name" => "British Library", - "displayName" => "British Library", - "systemEmail" => "bob@example.com", - "website" => "https://www.bl.uk", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "BL", + "name" => "British Library", + "displayName" => "British Library", + "systemEmail" => "bob@example.com", + "website" => "https://www.bl.uk", + "country" => "GB", + }, + }, + } + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + token, + } + end + let(:admin_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + admin_token, + } end - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + token } } - let(:admin_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + admin_token } } describe "GET /providers" do let!(:providers) { create_list(:provider, 3) } @@ -37,8 +62,15 @@ end describe "GET /providers for consortium" do - let(:consortium) { create(:provider, symbol: "dc", role_name: "ROLE_CONSORTIUM") } - let!(:consortium_organization) { create(:provider, consortium: consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION") } + let(:consortium) do + create(:provider, symbol: "dc", role_name: "ROLE_CONSORTIUM") + end + let!(:consortium_organization) do + create( + :provider, + consortium: 
consortium, role_name: "ROLE_CONSORTIUM_ORGANIZATION", + ) + end let!(:provider) { create(:provider) } before do @@ -58,7 +90,8 @@ describe "GET /providers/:id" do context "when the record exists" do it "returns the provider" do - get "/providers/#{provider.symbol.downcase}", params: nil, session: headers + get "/providers/#{provider.symbol.downcase}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json).not_to be_empty @@ -67,19 +100,32 @@ end it "returns the provider info for member page" do - get "/providers/#{provider.symbol.downcase}", params: nil, session: headers - - expect(json["data"]["attributes"]["twitterHandle"]).to eq(provider.twitter_handle) - expect(json["data"]["attributes"]["billingInformation"]).to eq(provider.billing_information) + get "/providers/#{provider.symbol.downcase}", + params: nil, session: headers + + expect(json["data"]["attributes"]["twitterHandle"]).to eq( + provider.twitter_handle, + ) + expect(json["data"]["attributes"]["billingInformation"]).to eq( + provider.billing_information, + ) expect(json["data"]["attributes"]["rorId"]).to eq(provider.ror_id) end end context "get provider type ROLE_CONTRACTUAL_PROVIDER and check it works " do - let(:provider) { create(:provider, role_name: "ROLE_CONTRACTUAL_PROVIDER", name: "Contractor", symbol: "CONTRCTR") } + let(:provider) do + create( + :provider, + role_name: "ROLE_CONTRACTUAL_PROVIDER", + name: "Contractor", + symbol: "CONTRCTR", + ) + end it "get provider" do - get "/providers/#{provider.symbol.downcase}", params: nil, session: headers + get "/providers/#{provider.symbol.downcase}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data", "id")).to eq(provider.symbol.downcase) @@ -91,13 +137,20 @@ get "/providers/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + 
expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end context "text/csv" do it "returns status code 200" do - get "/providers/", params: nil, session: { "HTTP_ACCEPT" => "text/csv", "Authorization" => "Bearer " + token } + get "/providers/", + params: nil, + session: { + "HTTP_ACCEPT" => "text/csv", "Authorization" => "Bearer " + token + } expect(last_response.status).to eq(200) end @@ -105,8 +158,8 @@ end describe "GET /providers/:id meta" do - let(:provider) { create(:provider) } - let(:client) { create(:client, provider: provider) } + let(:provider) { create(:provider) } + let(:client) { create(:client, provider: provider) } let!(:dois) { create_list(:doi, 3, client: client, aasm_state: "findable") } before do @@ -117,7 +170,8 @@ end it "returns provider" do - get "/providers/#{provider.symbol.downcase}", params: nil, session: headers + get "/providers/#{provider.symbol.downcase}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data", "id")).to eq(provider.symbol.downcase) @@ -150,9 +204,15 @@ end describe "GET /providers/:id/stats" do - let(:provider) { create(:provider) } - let(:client) { create(:client, provider: provider) } - let!(:dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let(:provider) { create(:provider) } + let(:client) { create(:client, provider: provider) } + let!(:dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end before do Provider.import @@ -162,40 +222,51 @@ end it "returns provider" do - get "/providers/#{provider.symbol.downcase}/stats", params: nil, session: headers + get "/providers/#{provider.symbol.downcase}/stats", + params: nil, session: headers expect(last_response.status).to eq(200) - expect(json["clients"]).to eq([{ "count" => 1, "id" => "2020", "title" => "2020" }]) + expect(json["clients"]).to eq( + [{ 
"count" => 1, "id" => "2020", "title" => "2020" }], + ) # expect(json["resourceTypes"]).to eq([{"count"=>3, "id"=>"dataset", "title"=>"Dataset"}]) - expect(json["dois"]).to eq([{ "count" => 3, "id" => "2020", "title" => "2020" }]) + expect(json["dois"]).to eq( + [{ "count" => 3, "id" => "2020", "title" => "2020" }], + ) end end describe "POST /providers" do context "request is valid" do - let(:logo) { "data:image/png;base64," + Base64.strict_encode64(file_fixture("bl.png").read) } + let(:logo) do + "data:image/png;base64," + + Base64.strict_encode64(file_fixture("bl.png").read) + end let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "BL", - "name" => "British Library", - "displayName" => "British Library", - "memberType" => "consortium_organization", - "logo" => logo, - "website" => "https://www.bl.uk", - "salesforceId" => "abc012345678901234", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": consortium.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "BL", + "name" => "British Library", + "displayName" => "British Library", + "memberType" => "consortium_organization", + "logo" => logo, + "website" => "https://www.bl.uk", + "salesforceId" => "abc012345678901234", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { + "type": "providers", "id": consortium.symbol.downcase + }, + }, + }, + }, + } end it "creates a provider" do @@ -203,15 +274,28 @@ expect(last_response.status).to eq(200) expect(json.dig("data", "attributes", "name")).to eq("British Library") - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") - expect(json.dig("data", "relationships", "consortium", "data", "id")).to eq(consortium.symbol.downcase) + expect(json.dig("data", "attributes", 
"systemEmail")).to eq( + "doe@joe.joe", + ) + expect( + json.dig("data", "relationships", "consortium", "data", "id"), + ).to eq(consortium.symbol.downcase) end end context "request ability check" do let!(:providers) { create_list(:provider, 2) } - let(:last_provider_token) { User.generate_token(provider_id: providers.last.symbol, role_id: "provider_admin") } - let(:headers_last) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + last_provider_token } } + let(:last_provider_token) do + User.generate_token( + provider_id: providers.last.symbol, role_id: "provider_admin", + ) + end + let(:headers_last) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + last_provider_token, + } + end before do Provider.import @@ -219,9 +303,12 @@ end it "has no permission" do - get "/providers/#{providers.first.symbol}", params: nil, session: headers_last + get "/providers/#{providers.first.symbol}", + params: nil, session: headers_last - expect(json["data"].dig("attributes", "symbol")).to eq(providers.first.symbol) + expect(json["data"].dig("attributes", "symbol")).to eq( + providers.first.symbol, + ) expect(json["data"].dig("attributes", "billingInformation")).to eq(nil) expect(json["data"].dig("attributes", "twitterHandle")).to eq(nil) end @@ -229,70 +316,96 @@ context "create provider member_role contractual_member" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "FG", - "name" => "Figshare", - "displayName" => "Figshare", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "website" => "https://www.bl.uk", - "memberType" => "contractual_member", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "FG", + "name" => "Figshare", + "displayName" => "Figshare", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "website" => "https://www.bl.uk", + "memberType" => "contractual_member", + "country" => 
"GB", + }, + }, + } end it "creates a provider" do post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) expect(json.dig("data", "attributes", "name")).to eq("Figshare") - expect(json.dig("data", "attributes", "memberType")).to eq("contractual_member") + expect(json.dig("data", "attributes", "memberType")).to eq( + "contractual_member", + ) end end context "create provider member_role consortium_organization" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "FG", - "name" => "Figshare", - "displayName" => "Figshare", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "website" => "https://www.bl.uk", - "memberType" => "consortium_organization", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": consortium.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "FG", + "name" => "Figshare", + "displayName" => "Figshare", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "website" => "https://www.bl.uk", + "memberType" => "consortium_organization", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { + "type": "providers", "id": consortium.symbol.downcase + }, + }, + }, + }, + } end it "creates a provider" do post "/providers", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) expect(json.dig("data", "attributes", "name")).to eq("Figshare") - expect(json.dig("data", "attributes", "memberType")).to eq("consortium_organization") - expect(json.dig("data", "relationships", "consortium", "data", 
"id")).to eq(consortium.symbol.downcase) + expect(json.dig("data", "attributes", "memberType")).to eq( + "consortium_organization", + ) + expect( + json.dig("data", "relationships", "consortium", "data", "id"), + ).to eq(consortium.symbol.downcase) sleep 1 - get "/providers/#{consortium.symbol.downcase}?include=consortium-organizations", params: nil, session: headers + get "/providers/#{ + consortium.symbol.downcase + }?include=consortium-organizations", + params: nil, session: headers expect(last_response.status).to eq(200) - expect(json.dig("included", 0, "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("included", 0, "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) expect(json.dig("included", 0, "attributes", "name")).to eq("Figshare") - expect(json.dig("included", 0, "attributes", "memberType")).to eq("consortium_organization") - expect(json.dig("included", 0, "relationships", "consortium", "data", "id")).to eq(consortium.symbol) + expect(json.dig("included", 0, "attributes", "memberType")).to eq( + "consortium_organization", + ) + expect( + json.dig("included", 0, "relationships", "consortium", "data", "id"), + ).to eq(consortium.symbol) # get "/providers?consortium-lead-id=#{consortium_lead.symbol.downcase}", nil, headers @@ -306,66 +419,79 @@ context "create provider not member_role consortium_organization" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "FG", - "name" => "Figshare", - "displayName" => "Figshare", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "memberType" => "provider", - "website" => "https://www.bl.uk", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": consortium.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "FG", + "name" => "Figshare", + "displayName" => "Figshare", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + 
"memberType" => "provider", + "website" => "https://www.bl.uk", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { + "type": "providers", "id": consortium.symbol.downcase + }, + }, + }, + }, + } end it "creates a provider" do post "/providers", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) expect(json.dig("data", "attributes", "name")).to eq("Figshare") - expect(json.dig("data", "attributes", "memberType")).to eq("direct_member") - expect(json.dig("data", "relationships", "consortium", "data", "id")).to be_nil + expect(json.dig("data", "attributes", "memberType")).to eq( + "direct_member", + ) + expect( + json.dig("data", "relationships", "consortium", "data", "id"), + ).to be_nil end end context "create provider not member_role consortium" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "FG", - "name" => "Figshare", - "displayName" => "Figshare", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "website" => "https://www.bl.uk", - "memberType" => "consortium_organization", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": provider.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "FG", + "name" => "Figshare", + "displayName" => "Figshare", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "website" => "https://www.bl.uk", + "memberType" => "consortium_organization", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { "type": "providers", "id": provider.symbol.downcase }, + }, + }, + }, + } end it "creates a provider" do post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to 
eq("source" => "consortium_id", "title" => "The consortium must be of member_type consortium") + expect(json["errors"].first).to eq( + "source" => "consortium_id", + "title" => "The consortium must be of member_type consortium", + ) end end @@ -413,11 +539,21 @@ post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("jkiritha@andrew.cmu.edu") - expect(json.dig("data", "attributes", "billingInformation", "state")).to eq("Rennes") - expect(json.dig("data", "attributes", "billingInformation", "postCode")).to eq("122dc") - expect(json.dig("data", "attributes", "twitterHandle")).to eq("@meekakitty") - expect(json.dig("data", "attributes", "rorId")).to eq("https://ror.org/05njkjr15") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "jkiritha@andrew.cmu.edu", + ) + expect( + json.dig("data", "attributes", "billingInformation", "state"), + ).to eq("Rennes") + expect( + json.dig("data", "attributes", "billingInformation", "postCode"), + ).to eq("122dc") + expect(json.dig("data", "attributes", "twitterHandle")).to eq( + "@meekakitty", + ) + expect(json.dig("data", "attributes", "rorId")).to eq( + "https://ror.org/05njkjr15", + ) end end @@ -480,21 +616,61 @@ post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "technicalContact", "email")).to eq("kristian@example.com") - expect(json.dig("data", "attributes", "technicalContact", "givenName")).to eq("Kristian") - expect(json.dig("data", "attributes", "technicalContact", "familyName")).to eq("Garza") - expect(json.dig("data", "attributes", "billingContact", "email")).to eq("Trisha@example.com") - expect(json.dig("data", "attributes", "billingContact", "givenName")).to eq("Trisha") - expect(json.dig("data", "attributes", "billingContact", "familyName")).to eq("cruse") - expect(json.dig("data", "attributes", 
"secondaryBillingContact", "email")).to eq("Trisha@example.com") - expect(json.dig("data", "attributes", "secondaryBillingContact", "givenName")).to eq("Trisha") - expect(json.dig("data", "attributes", "secondaryBillingContact", "familyName")).to eq("cruse") - expect(json.dig("data", "attributes", "serviceContact", "email")).to eq("martin@example.com") - expect(json.dig("data", "attributes", "serviceContact", "givenName")).to eq("Martin") - expect(json.dig("data", "attributes", "serviceContact", "familyName")).to eq("Fenner") - expect(json.dig("data", "attributes", "votingContact", "email")).to eq("robin@example.com") - expect(json.dig("data", "attributes", "votingContact", "givenName")).to eq("Robin") - expect(json.dig("data", "attributes", "votingContact", "familyName")).to eq("Dasler") + expect( + json.dig("data", "attributes", "technicalContact", "email"), + ).to eq("kristian@example.com") + expect( + json.dig("data", "attributes", "technicalContact", "givenName"), + ).to eq("Kristian") + expect( + json.dig("data", "attributes", "technicalContact", "familyName"), + ).to eq("Garza") + expect(json.dig("data", "attributes", "billingContact", "email")).to eq( + "Trisha@example.com", + ) + expect( + json.dig("data", "attributes", "billingContact", "givenName"), + ).to eq("Trisha") + expect( + json.dig("data", "attributes", "billingContact", "familyName"), + ).to eq("cruse") + expect( + json.dig("data", "attributes", "secondaryBillingContact", "email"), + ).to eq("Trisha@example.com") + expect( + json.dig( + "data", + "attributes", + "secondaryBillingContact", + "givenName", + ), + ).to eq("Trisha") + expect( + json.dig( + "data", + "attributes", + "secondaryBillingContact", + "familyName", + ), + ).to eq("cruse") + expect(json.dig("data", "attributes", "serviceContact", "email")).to eq( + "martin@example.com", + ) + expect( + json.dig("data", "attributes", "serviceContact", "givenName"), + ).to eq("Martin") + expect( + json.dig("data", "attributes", 
"serviceContact", "familyName"), + ).to eq("Fenner") + expect(json.dig("data", "attributes", "votingContact", "email")).to eq( + "robin@example.com", + ) + expect( + json.dig("data", "attributes", "votingContact", "givenName"), + ).to eq("Robin") + expect( + json.dig("data", "attributes", "votingContact", "familyName"), + ).to eq("Dasler") end end @@ -504,25 +680,26 @@ "data" => { "attributes" => { "meta" => { - "clients" => [{ - "id" => "2019", - "title" => "2019", - "count" => 1, - }], + "clients" => [ + { "id" => "2019", "title" => "2019", "count" => 1 }, + ], "dois" => [], }, "name" => "Carnegie Mellon University", "displayName" => "Carnegie Mellon University", - "symbol" => "CMU", "description" => nil, - "region" => "AMER", "country" => "US", + "symbol" => "CMU", + "description" => nil, + "region" => "AMER", + "country" => "US", "organizationType" => "academicInstitution", - "focusArea" => "general", "logoUrl" => "", + "focusArea" => "general", + "logoUrl" => "", "systemEmail" => "jkiritha@andrew.cmu.edu", "isActive" => true, "passwordInput" => "@change", "hasPassword" => false, "keepPassword" => false, - "joined" => "" + "joined" => "", }, "type" => "providers", }, @@ -533,99 +710,138 @@ post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("jkiritha@andrew.cmu.edu") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "jkiritha@andrew.cmu.edu", + ) end end context "request for admin provider" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "ADMIN", - "name" => "Admin", - "displayName" => "Admin", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "ADMIN", + "name" => "Admin", + "displayName" => "Admin", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "country" => "GB", + }, + }, + 
} end it "creates a provider" do post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) end end context "request uses basic auth" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "BL", - "name" => "British Library", - "displayName" => "British Library", - "website" => "https://www.bl.uk", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "country" => "GB", - } } } - end - let(:admin) { create(:provider, symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345") } - let(:credentials) { admin.encode_auth_param(username: "ADMIN", password: "12345") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + credentials } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "BL", + "name" => "British Library", + "displayName" => "British Library", + "website" => "https://www.bl.uk", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "country" => "GB", + }, + }, + } + end + let(:admin) do + create( + :provider, + symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345", + ) + end + let(:credentials) do + admin.encode_auth_param(username: "ADMIN", password: "12345") + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Basic " + credentials, + } + end it "creates a provider" do post "/providers", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("doe@joe.joe") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "doe@joe.joe", + ) end end context "generate random symbol" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "Admin", - "displayName" => 
"Admin", - "region" => "EMEA", - "systemEmail" => "doe@joe.joe", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "Admin", + "displayName" => "Admin", + "region" => "EMEA", + "systemEmail" => "doe@joe.joe", + "country" => "GB", + }, + }, + } end it "creates a provider" do post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "symbol")).to match(/\A[A-Z]{4}\Z/) + expect(json.dig("data", "attributes", "symbol")).to match( + /\A[A-Z]{4}\Z/, + ) end end context "when the request is missing a required attribute" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "symbol" => "BL", - "name" => "British Library", - "displayName" => "British Library", - "website" => "https://www.bl.uk", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "symbol" => "BL", + "name" => "British Library", + "displayName" => "British Library", + "website" => "https://www.bl.uk", + "country" => "GB", + }, + }, + } end it "returns a validation failure message" do post "/providers", params: params, session: admin_headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "system_email", "title" => "Can't be blank") + expect(json["errors"].first).to eq( + "source" => "system_email", "title" => "Can't be blank", + ) end end context "when the request is missing a data object" do let(:params) do - { "type" => "providers", + { + "type" => "providers", "attributes" => { "symbol" => "BL", "systemEmail" => "timAus", @@ -633,7 +849,8 @@ "displayName" => "British Library", "website" => "https://www.bl.uk", "country" => "GB", - } } + }, + } end it "returns status code 400" do @@ -651,181 +868,256 @@ describe "PUT /providers/:id" do context "when the record exists" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "British 
Library", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - "displayName" => "British Library", - "memberType" => "consortium_organization", - "website" => "https://www.bl.uk", - "region" => "Americas", - "systemEmail" => "Pepe@mdm.cod", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": consortium.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "British Library", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + "displayName" => "British Library", + "memberType" => "consortium_organization", + "website" => "https://www.bl.uk", + "region" => "Americas", + "systemEmail" => "Pepe@mdm.cod", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { + "type": "providers", "id": consortium.symbol.downcase + }, + }, + }, + }, + } end it "updates the record" do put "/providers/#{provider.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "displayName")).to eq("British Library") - expect(json.dig("data", "attributes", "globusUuid")).to eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") - expect(json.dig("data", "relationships", "consortium", "data", "id")).to eq(consortium.symbol.downcase) + expect(json.dig("data", "attributes", "displayName")).to eq( + "British Library", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) + expect( + json.dig("data", "relationships", "consortium", "data", "id"), + ).to eq(consortium.symbol.downcase) end end context "when updating as consortium" do - let(:consortium_credentials) { User.encode_auth_param(username: consortium.symbol, password: "12345") } - let(:consortium_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + consortium_credentials } } + let(:consortium_credentials) do + User.encode_auth_param(username: 
consortium.symbol, password: "12345") + end + let(:consortium_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Basic " + consortium_credentials, + } + end let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "British Library", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - "displayName" => "British Library", - "memberType" => "consortium_organization", - "website" => "https://www.bl.uk", - "region" => "Americas", - "systemEmail" => "Pepe@mdm.cod", - "country" => "GB", - }, - "relationships": { - "consortium": { - "data": { - "type": "providers", - "id": consortium.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "British Library", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + "displayName" => "British Library", + "memberType" => "consortium_organization", + "website" => "https://www.bl.uk", + "region" => "Americas", + "systemEmail" => "Pepe@mdm.cod", + "country" => "GB", + }, + "relationships": { + "consortium": { + "data": { + "type": "providers", "id": consortium.symbol.downcase + }, + }, + }, + }, + } end it "updates the record" do - put "/providers/#{provider.symbol}", params: params, session: consortium_headers + put "/providers/#{provider.symbol}", + params: params, session: consortium_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "displayName")).to eq("British Library") - expect(json.dig("data", "attributes", "globusUuid")).to eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") - expect(json.dig("data", "relationships", "consortium", "data", "id")).to eq(consortium.symbol.downcase) + expect(json.dig("data", "attributes", "displayName")).to eq( + "British Library", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) + expect( + json.dig("data", "relationships", "consortium", "data", "id"), + ).to 
eq(consortium.symbol.downcase) end end context "when updating as consortium_organization" do - let(:consortium_organization_credentials) { User.encode_auth_param(username: provider.symbol, password: "12345") } - let(:consortium_organization_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + consortium_organization_credentials } } + let(:consortium_organization_credentials) do + User.encode_auth_param(username: provider.symbol, password: "12345") + end + let(:consortium_organization_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => + "Basic " + consortium_organization_credentials, + } + end let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "British Library", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - "displayName" => "British Library", - "website" => "https://www.bl.uk", - "region" => "Americas", - "systemEmail" => "Pepe@mdm.cod", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "British Library", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + "displayName" => "British Library", + "website" => "https://www.bl.uk", + "region" => "Americas", + "systemEmail" => "Pepe@mdm.cod", + "country" => "GB", + }, + }, + } end it "updates the record" do - put "/providers/#{provider.symbol}", params: params, session: consortium_organization_headers + put "/providers/#{provider.symbol}", + params: params, session: consortium_organization_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "displayName")).to eq("British Library") - expect(json.dig("data", "attributes", "globusUuid")).to eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") + expect(json.dig("data", "attributes", "displayName")).to eq( + "British Library", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) end end context "removes 
globus_uuid" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "globusUuid" => nil, - } } } + { + "data" => { + "type" => "providers", "attributes" => { "globusUuid" => nil } + }, + } end it "updates the record" do put "/providers/#{provider.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "displayName")).to eq("My provider") + expect(json.dig("data", "attributes", "displayName")).to eq( + "My provider", + ) expect(json.dig("data", "attributes", "globusUuid")).to be_nil end end context "invalid globus_uuid" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "globusUuid" => "abc", - } } } + { + "data" => { + "type" => "providers", "attributes" => { "globusUuid" => "abc" } + }, + } end it "updates the record" do put "/providers/#{provider.symbol}", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "globus_uuid", "title" => "Abc is not a valid UUID") + expect(json["errors"].first).to eq( + "source" => "globus_uuid", "title" => "Abc is not a valid UUID", + ) end end context "ror_id in wrong format" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "rorId" => "ror.org/05njkjr15", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { "rorId" => "ror.org/05njkjr15" }, + }, + } end it "raises error" do put "/providers/#{provider.symbol}", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "ror_id", "title" => "ROR ID should be a url") + expect(json["errors"].first).to eq( + "source" => "ror_id", "title" => "ROR ID should be a url", + ) end end context "using basic auth" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "British Library", - "displayName" => "British Library", - "region" => "Americas", - "systemEmail" 
=> "Pepe@mdm.cod", - "website" => "https://www.bl.uk", - "country" => "GB", - } } } - end - let(:admin) { create(:provider, symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345") } - let(:credentials) { admin.encode_auth_param(username: "ADMIN", password: "12345") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + credentials } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "British Library", + "displayName" => "British Library", + "region" => "Americas", + "systemEmail" => "Pepe@mdm.cod", + "website" => "https://www.bl.uk", + "country" => "GB", + }, + }, + } + end + let(:admin) do + create( + :provider, + symbol: "ADMIN", role_name: "ROLE_ADMIN", password_input: "12345", + ) + end + let(:credentials) do + admin.encode_auth_param(username: "ADMIN", password: "12345") + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Basic " + credentials, + } + end it "updates the record" do put "/providers/#{provider.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "systemEmail")).to eq("Pepe@mdm.cod") + expect(json.dig("data", "attributes", "systemEmail")).to eq( + "Pepe@mdm.cod", + ) end end context "when the resource doesn't exist" do let(:params) do - { "data" => { "type" => "providers", - "attributes" => { - "name" => "British Library", - "displayName" => "British Library", - "region" => "Americas", - "website" => "https://www.bl.uk", - "systemEmail" => "Pepe@mdm.cod", - "country" => "GB", - } } } + { + "data" => { + "type" => "providers", + "attributes" => { + "name" => "British Library", + "displayName" => "British Library", + "region" => "Americas", + "website" => "https://www.bl.uk", + "systemEmail" => "Pepe@mdm.cod", + "country" => "GB", + }, + }, + } end it "returns status code 404" do @@ -845,7 +1137,8 @@ end it "deletes the provider" do - delete 
"/providers/#{provider.symbol.downcase}", params: nil, session: admin_headers + delete "/providers/#{provider.symbol.downcase}", + params: nil, session: admin_headers expect(last_response.status).to eq(204) end end diff --git a/spec/requests/random_spec.rb b/spec/requests/random_spec.rb index e392b32dd..4b6037d0b 100644 --- a/spec/requests/random_spec.rb +++ b/spec/requests/random_spec.rb @@ -1,8 +1,15 @@ +# frozen_string_literal: true + require "rails_helper" describe "random", type: :request do let(:token) { User.generate_token } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + token } } + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + token, + } + end context "random string" do it "creates a random string" do diff --git a/spec/requests/repositories_spec.rb b/spec/requests/repositories_spec.rb index c40cfc2ae..de7bee6ff 100644 --- a/spec/requests/repositories_spec.rb +++ b/spec/requests/repositories_spec.rb @@ -1,33 +1,64 @@ +# frozen_string_literal: true + require "rails_helper" describe RepositoriesController, type: :request, elasticsearch: true do let(:ids) { clients.map(&:uid).join(",") } let(:consortium) { create(:provider, role_name: "ROLE_CONSORTIUM") } - let(:provider) { create(:provider, consortium: consortium, symbol: "ABC", role_name: "ROLE_CONSORTIUM_ORGANIZATION", password_input: "12345") } - let!(:client) { create(:client, provider: provider, client_type: "repository") } - let(:bearer) { User.generate_token(role_id: "provider_admin", provider_id: provider.symbol.downcase) } - let(:consortium_bearer) { User.generate_token(role_id: "consortium_admin", provider_id: consortium.symbol.downcase) } + let(:provider) do + create( + :provider, + consortium: consortium, + symbol: "ABC", + role_name: "ROLE_CONSORTIUM_ORGANIZATION", + password_input: "12345", + ) + end + let!(:client) do + create(:client, provider: provider, client_type: "repository") + 
end + let(:bearer) do + User.generate_token( + role_id: "provider_admin", provider_id: provider.symbol.downcase, + ) + end + let(:consortium_bearer) do + User.generate_token( + role_id: "consortium_admin", provider_id: consortium.symbol.downcase, + ) + end let(:params) do - { "data" => { "type" => "clients", - "attributes" => { - "symbol" => provider.symbol + ".IMPERIAL", - "name" => "Imperial College", - "systemEmail" => "bob@example.com", - "salesforceId" => "abc012345678901234", - "clientType" => "repository", - "certificate" => ["CoreTrustSeal"], - }, - "relationships": { - "provider": { - "data": { - "type": "providers", - "id": provider.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "clients", + "attributes" => { + "symbol" => provider.symbol + ".IMPERIAL", + "name" => "Imperial College", + "systemEmail" => "bob@example.com", + "salesforceId" => "abc012345678901234", + "clientType" => "repository", + "certificate" => %w[CoreTrustSeal], + }, + "relationships": { + "provider": { + "data": { "type": "providers", "id": provider.symbol.downcase }, + }, + }, + }, + } + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end + let(:consortium_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + consortium_bearer, + } end - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } - let(:consortium_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + consortium_bearer } } let(:query) { "jamon" } describe "GET /repositories", elasticsearch: true do @@ -45,7 +76,11 @@ expect(json["data"].size).to eq(4) expect(json.dig("meta", "total")).to eq(4) expect(json.dig("meta", "providers").length).to eq(4) - expect(json.dig("meta", "providers").first).to eq("count" => 1, "id" => provider.symbol.downcase, "title" => "My provider") + 
expect(json.dig("meta", "providers").first).to eq( + "count" => 1, + "id" => provider.symbol.downcase, + "title" => "My provider", + ) end end @@ -86,7 +121,9 @@ expect(last_response.status).to eq(200) expect(json.dig("data", "attributes", "name")).to eq(client.name) - expect(json.dig("data", "attributes", "globusUuid")).to eq("bc7d0274-3472-4a79-b631-e4c7baccc667") + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "bc7d0274-3472-4a79-b631-e4c7baccc667", + ) expect(json["meta"]).to eq("doiCount" => 0, "prefixCount" => 0) end end @@ -96,14 +133,23 @@ get "/repositories/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "GET /repositories/totals" do let(:client) { create(:client) } - let!(:datacite_dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end before do DataciteDoi.import @@ -116,7 +162,9 @@ expect(last_response.status).to eq(200) expect(json.first.dig("count")).to eq(3) - expect(json.first.dig("states")).to eq([{ "count" => 3, "id" => "findable", "title" => "Findable" }]) + expect(json.first.dig("states")).to eq( + [{ "count" => 3, "id" => "findable", "title" => "Findable" }], + ) expect(json.first.dig("temporal")).not_to be_nil end end @@ -125,7 +173,13 @@ let(:provider) { create(:provider) } let(:client) { create(:client) } let!(:client_prefix) { create(:client_prefix, client: client) } - let!(:datacite_dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: 
"DataciteDoi", + ) + end before do DataciteDoi.import @@ -147,7 +201,13 @@ describe "GET /repositories/:id/stats" do let(:provider) { create(:provider) } let(:client) { create(:client) } - let!(:datacite_dois) { create_list(:doi, 3, client: client, aasm_state: "findable", type: "DataciteDoi") } + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, aasm_state: "findable", type: "DataciteDoi", + ) + end before do Provider.import @@ -160,8 +220,12 @@ get "/repositories/#{client.uid}/stats" expect(last_response.status).to eq(200) - expect(json["resourceTypes"]).to eq([{ "count" => 3, "id" => "dataset", "title" => "Dataset" }]) - expect(json["dois"]).to eq([{ "count" => 3, "id" => "2020", "title" => "2020" }]) + expect(json["resourceTypes"]).to eq( + [{ "count" => 3, "id" => "dataset", "title" => "Dataset" }], + ) + expect(json["dois"]).to eq( + [{ "count" => 3, "id" => "2020", "title" => "2020" }], + ) end end @@ -174,11 +238,13 @@ attributes = json.dig("data", "attributes") expect(attributes["name"]).to eq("Imperial College") expect(attributes["systemEmail"]).to eq("bob@example.com") - expect(attributes["certificate"]).to eq(["CoreTrustSeal"]) + expect(attributes["certificate"]).to eq(%w[CoreTrustSeal]) expect(attributes["salesforceId"]).to eq("abc012345678901234") relationships = json.dig("data", "relationships") - expect(relationships.dig("provider", "data", "id")).to eq(provider.symbol.downcase) + expect(relationships.dig("provider", "data", "id")).to eq( + provider.symbol.downcase, + ) end end @@ -190,36 +256,44 @@ attributes = json.dig("data", "attributes") expect(attributes["name"]).to eq("Imperial College") expect(attributes["systemEmail"]).to eq("bob@example.com") - expect(attributes["certificate"]).to eq(["CoreTrustSeal"]) + expect(attributes["certificate"]).to eq(%w[CoreTrustSeal]) expect(attributes["salesforceId"]).to eq("abc012345678901234") relationships = json.dig("data", "relationships") - expect(relationships.dig("provider", "data", 
"id")).to eq(provider.symbol.downcase) + expect(relationships.dig("provider", "data", "id")).to eq( + provider.symbol.downcase, + ) end end context "when the request is invalid" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "symbol" => provider.symbol + ".IMPERIAL", - "name" => "Imperial College", - }, - "relationships": { - "provider": { - "data": { - "type": "providers", - "id": provider.symbol.downcase, - }, - }, - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { + "symbol" => provider.symbol + ".IMPERIAL", + "name" => "Imperial College", + }, + "relationships": { + "provider": { + "data": { "type": "providers", "id": provider.symbol.downcase }, + }, + }, + }, + } end it "returns status code 422" do post "/repositories", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"]).to eq([{ "source" => "system_email", "title" => "Can't be blank" }, { "source" => "system_email", "title" => "Is invalid" }]) + expect(json["errors"]).to eq( + [ + { "source" => "system_email", "title" => "Can't be blank" }, + { "source" => "system_email", "title" => "Is invalid" }, + ], + ) end end end @@ -227,20 +301,28 @@ describe "PUT /repositories/:id" do context "when the record exists" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "name" => "Imperial College 2", - "clientType" => "periodical", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { + "name" => "Imperial College 2", + "clientType" => "periodical", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + }, + }, + } end it "updates the record" do put "/repositories/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Imperial College 2") - expect(json.dig("data", "attributes", "globusUuid")).to 
eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") + expect(json.dig("data", "attributes", "name")).to eq( + "Imperial College 2", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) expect(json.dig("data", "attributes", "clientType")).to eq("periodical") end @@ -248,20 +330,29 @@ context "consortium" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "name" => "Imperial College 2", - "clientType" => "periodical", - "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { + "name" => "Imperial College 2", + "clientType" => "periodical", + "globusUuid" => "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + }, + }, + } end it "updates the record" do - put "/repositories/#{client.symbol}", params: params, session: consortium_headers + put "/repositories/#{client.symbol}", + params: params, session: consortium_headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Imperial College 2") - expect(json.dig("data", "attributes", "globusUuid")).to eq("9908a164-1e4f-4c17-ae1b-cc318839d6c8") + expect(json.dig("data", "attributes", "name")).to eq( + "Imperial College 2", + ) + expect(json.dig("data", "attributes", "globusUuid")).to eq( + "9908a164-1e4f-4c17-ae1b-cc318839d6c8", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) expect(json.dig("data", "attributes", "clientType")).to eq("periodical") end @@ -269,10 +360,11 @@ context "removes the globus_uuid" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "globusUuid" => nil, - } } } + { + "data" => { + "type" => "repositories", "attributes" => { "globusUuid" => nil } + }, + } end it "updates the record" do @@ -286,12 +378,28 @@ context "transfer repository" do let(:bearer) { User.generate_token } - 
let(:staff_headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:staff_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end - let(:new_provider) { create(:provider, symbol: "QUECHUA", password_input: "12345") } + let(:new_provider) do + create(:provider, symbol: "QUECHUA", password_input: "12345") + end let!(:prefix) { create(:prefix) } - let!(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix_id: provider_prefix.uid) } + let!(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end + let!(:client_prefix) do + create( + :client_prefix, + client: client, + prefix: prefix, + provider_prefix_id: provider_prefix.uid, + ) + end let(:doi) { create_list(:doi, 10, client: client) } let(:params) do @@ -299,106 +407,145 @@ "data" => { "type" => "clients", "attributes" => { - "mode" => "transfer", - "targetId" => new_provider.symbol, + "mode" => "transfer", "targetId" => new_provider.symbol }, }, } end it "updates the record" do - put "/repositories/#{client.symbol}", params: params, session: staff_headers + put "/repositories/#{client.symbol}", + params: params, session: staff_headers expect(last_response.status).to eq(200) expect(json.dig("data", "attributes", "name")).to eq("My data center") - expect(json.dig("data", "relationships", "provider", "data", "id")).to eq("quechua") - expect(json.dig("data", "relationships", "prefixes", "data").first.dig("id")).to eq(prefix.uid) + expect( + json.dig("data", "relationships", "provider", "data", "id"), + ).to eq("quechua") + expect( + json.dig("data", "relationships", "prefixes", "data").first.dig("id"), + ).to eq(prefix.uid) get "/providers/#{provider.symbol}" - expect(json.dig("data", "relationships", "prefixes", "data")).to be_empty + expect( + 
json.dig("data", "relationships", "prefixes", "data"), + ).to be_empty get "/providers/#{new_provider.symbol}" - expect(json.dig("data", "relationships", "prefixes", "data").first.dig("id")).to eq(prefix.uid) + expect( + json.dig("data", "relationships", "prefixes", "data").first.dig("id"), + ).to eq(prefix.uid) get "/prefixes/#{prefix.uid}" - expect(json.dig("data", "relationships", "clients", "data").first.dig("id")).to eq(client.symbol.downcase) + expect( + json.dig("data", "relationships", "clients", "data").first.dig("id"), + ).to eq(client.symbol.downcase) end end context "invalid globus_uuid" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "globusUuid" => "abc", - } } } + { + "data" => { + "type" => "repositories", "attributes" => { "globusUuid" => "abc" } + }, + } end it "updates the record" do put "/repositories/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "globus_uuid", "title" => "Abc is not a valid UUID") + expect(json["errors"].first).to eq( + "source" => "globus_uuid", "title" => "Abc is not a valid UUID", + ) end end context "using basic auth", vcr: true do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "name" => "Imperial College 2", - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { "name" => "Imperial College 2" }, + }, + } + end + let(:credentials) do + provider.encode_auth_param( + username: provider.symbol.downcase, password: "12345", + ) + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Basic " + credentials, + } end - let(:credentials) { provider.encode_auth_param(username: provider.symbol.downcase, password: "12345") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Basic " + credentials } } it "updates the record" do put "/repositories/#{client.symbol}", params: 
params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Imperial College 2") + expect(json.dig("data", "attributes", "name")).to eq( + "Imperial College 2", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) end end context "updating with ISSNs" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "name" => "Journal of Insignificant Results", - "clientType" => "periodical", - "issn" => { "electronic" => "1544-9173", - "print" => "1545-7885" }, - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { + "name" => "Journal of Insignificant Results", + "clientType" => "periodical", + "issn" => { "electronic" => "1544-9173", "print" => "1545-7885" }, + }, + }, + } end it "updates the record" do put "/repositories/#{client.symbol}", params: params, session: headers expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "name")).to eq("Journal of Insignificant Results") + expect(json.dig("data", "attributes", "name")).to eq( + "Journal of Insignificant Results", + ) expect(json.dig("data", "attributes", "name")).not_to eq(client.name) expect(json.dig("data", "attributes", "clientType")).to eq("periodical") - expect(json.dig("data", "attributes", "issn")).to eq("electronic" => "1544-9173", "print" => "1545-7885") + expect(json.dig("data", "attributes", "issn")).to eq( + "electronic" => "1544-9173", "print" => "1545-7885", + ) end end context "when the request is invalid" do let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "symbol" => client.symbol + "M", - "email" => "bob@example.com", - "name" => "Imperial College", - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { + "symbol" => client.symbol + "M", + "email" => "bob@example.com", + "name" => "Imperial College", + }, + }, + } end it "returns status code 422" do put "/repositories/#{client.symbol}", params: 
params, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "symbol", "title" => "Cannot be changed") + expect(json["errors"].first).to eq( + "source" => "symbol", "title" => "Cannot be changed", + ) end end end @@ -411,7 +558,8 @@ end it "returns status code 204 with consortium" do - delete "/repositories/#{client.uid}", params: nil, session: consortium_headers + delete "/repositories/#{client.uid}", + params: nil, session: consortium_headers expect(last_response.status).to eq(204) end @@ -426,19 +574,31 @@ it "returns a validation failure message" do delete "/repositories/xxx", params: nil, session: headers - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "doi transfer", elasticsearch: true do let!(:dois) { create_list(:doi, 3, client: client) } - let(:target) { create(:client, provider: provider, symbol: provider.symbol + ".TARGET", name: "Target Client") } + let(:target) do + create( + :client, + provider: provider, + symbol: provider.symbol + ".TARGET", + name: "Target Client", + ) + end let(:params) do - { "data" => { "type" => "repositories", - "attributes" => { - "targetId" => target.symbol, - } } } + { + "data" => { + "type" => "repositories", + "attributes" => { "targetId" => target.symbol }, + }, + } end before do @@ -456,7 +616,8 @@ end it "transfered all DOIs consortium" do - put "/repositories/#{client.symbol}", params: params, session: consortium_headers + put "/repositories/#{client.symbol}", + params: params, session: consortium_headers sleep 1 expect(last_response.status).to eq(200) diff --git a/spec/requests/repository_prefixes_spec.rb b/spec/requests/repository_prefixes_spec.rb index d2479d561..c8ddcc54a 100644 --- a/spec/requests/repository_prefixes_spec.rb +++ 
b/spec/requests/repository_prefixes_spec.rb @@ -1,14 +1,28 @@ +# frozen_string_literal: true + require "rails_helper" describe RepositoryPrefixesController, type: :request do let(:prefix) { create(:prefix) } let(:provider) { create(:provider) } let(:client) { create(:client, provider: provider) } - let(:provider_prefix) { create(:provider_prefix, provider: provider, prefix: prefix) } + let(:provider_prefix) do + create(:provider_prefix, provider: provider, prefix: prefix) + end let!(:client_prefixes) { create_list(:client_prefix, 5) } - let(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix: provider_prefix) } + let(:client_prefix) do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix: provider_prefix, + ) + end let(:bearer) { User.generate_token(role_id: "staff_admin") } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end describe "GET /repository-prefixes", elasticsearch: true do before do @@ -25,14 +39,18 @@ end it "returns repository-prefixes by repository-id" do - get "/repository-prefixes?repository-id=#{client_prefixes.first.client_id}", params: nil, session: headers + get "/repository-prefixes?repository-id=#{ + client_prefixes.first.client_id + }", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) end it "returns repository-prefixes by prefix-id" do - get "/repository-prefixes?prefix-id=#{client_prefixes.first.prefix_id}", params: nil, session: headers + get "/repository-prefixes?prefix-id=#{client_prefixes.first.prefix_id}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) @@ -46,14 +64,18 @@ end it "returns repository-prefixes by repository-id and prefix-id" do - get 
"/repository-prefixes?repository-id=#{client_prefixes.first.client_id}&#{client_prefixes.first.prefix_id}", params: nil, session: headers + get "/repository-prefixes?repository-id=#{ + client_prefixes.first.client_id + }&#{client_prefixes.first.prefix_id}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) end it "returns prefixes by client-id" do - get "/prefixes?client-id=#{client_prefixes.first.client_id}", params: nil, session: headers + get "/prefixes?client-id=#{client_prefixes.first.client_id}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json["data"].size).to eq(1) @@ -63,7 +85,8 @@ describe "GET /repository-prefixes/:uid" do context "when the record exists" do it "returns the repository-prefix" do - get "/repository-prefixes/#{client_prefix.uid}", params: nil, session: headers + get "/repository-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(200) expect(json.dig("data", "id")).to eq(client_prefix.uid) @@ -75,22 +98,33 @@ get "/repository-prefixes/xxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json["errors"].first).to eq("status" => "404", "title" => "The resource you are looking for doesn't exist.") + expect(json["errors"].first).to eq( + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + ) end end end describe "PATCH /repository-prefixes/:uid" do it "returns method not supported error" do - patch "/repository-prefixes/#{client_prefix.uid}", params: nil, session: headers + patch "/repository-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(405) - expect(json.dig("errors")).to eq([{ "status" => "405", "title" => "Method not allowed" }]) + expect(json.dig("errors")).to eq( + [{ "status" => "405", "title" => "Method not allowed" }], + ) end end describe "DELETE /repository-prefixes/:uid", 
elasticsearch: true do - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix, provider_prefix: provider_prefix) } + let!(:client_prefix) do + create( + :client_prefix, + client: client, prefix: prefix, provider_prefix: provider_prefix, + ) + end before do ClientPrefix.import @@ -98,7 +132,8 @@ end it "deletes a repository-prefix" do - delete "/repository-prefixes/#{client_prefix.uid}", params: nil, session: headers + delete "/repository-prefixes/#{client_prefix.uid}", + params: nil, session: headers expect(last_response.status).to eq(204) end @@ -120,23 +155,12 @@ "type" => "client-prefixes", "relationships": { "repository": { - "data": { - "type": "repository", - "id": client.symbol.downcase, - }, + "data": { "type": "repository", "id": client.symbol.downcase }, }, "provider-prefix": { - "data": { - "type": "provider-prefix", - "id": provider_prefix.uid, - }, - }, - "prefix": { - "data": { - "type": "prefix", - "id": prefix.uid, - }, + "data": { "type": "provider-prefix", "id": provider_prefix.uid }, }, + "prefix": { "data": { "type": "prefix", "id": prefix.uid } }, }, }, } @@ -153,18 +177,17 @@ context "when the request is invalid" do let!(:client) { create(:client) } let(:not_valid_attributes) do - { - "data" => { - "type" => "repository-prefixes", - }, - } + { "data" => { "type" => "repository-prefixes" } } end it "returns status code 422" do - post "/repository-prefixes", params: not_valid_attributes, session: headers + post "/repository-prefixes", + params: not_valid_attributes, session: headers expect(last_response.status).to eq(422) - expect(json["errors"].first).to eq("source" => "client", "title" => "Must exist") + expect(json["errors"].first).to eq( + "source" => "client", "title" => "Must exist", + ) end end end diff --git a/spec/requests/sessions_spec.rb b/spec/requests/sessions_spec.rb index 08eec22e2..d05226cdb 100644 --- a/spec/requests/sessions_spec.rb +++ b/spec/requests/sessions_spec.rb @@ -1,10 +1,14 @@ +# 
frozen_string_literal: true + require "rails_helper" describe "Provider session", type: :request do let!(:provider) { create(:provider, password_input: "12345") } context "request is valid" do - let(:params) { "grant_type=password&username=#{provider.symbol}&password=12345" } + let(:params) do + "grant_type=password&username=#{provider.symbol}&password=12345" + end it "creates a provider token" do post "/token", params: params @@ -18,13 +22,19 @@ end context "wrong grant_type" do - let(:params) { "grant_type=client_credentials&client_id=#{provider.symbol}&client_secret=12345" } + let(:params) do + "grant_type=client_credentials&client_id=#{ + provider.symbol + }&client_secret=12345" + end it "returns an error" do post "/token", params: params expect(last_response.status).to eq(400) - expect(json.fetch("errors", {})).to eq([{ "status" => "400", "title" => "Wrong grant type." }]) + expect(json.fetch("errors", {})).to eq( + [{ "status" => "400", "title" => "Wrong grant type." }], + ) end end @@ -35,27 +45,43 @@ post "/token", params: params expect(last_response.status).to eq(400) - expect(json.fetch("errors", {})).to eq([{ "status" => "400", "title" => "Missing account ID or password." }]) + expect(json.fetch("errors", {})).to eq( + [{ "status" => "400", "title" => "Missing account ID or password." }], + ) end end context "wrong password" do - let(:params) { "grant_type=password&username=#{provider.symbol}&password=12346" } + let(:params) do + "grant_type=password&username=#{provider.symbol}&password=12346" + end it "returns an error" do post "/token", params: params expect(last_response.status).to eq(400) - expect(json.fetch("errors", {})).to eq([{ "status" => "400", "title" => "Wrong account ID or password." }]) + expect(json.fetch("errors", {})).to eq( + [{ "status" => "400", "title" => "Wrong account ID or password." 
}], + ) end end end describe "Admin session", type: :request do - let!(:provider) { create(:provider, role_name: "ROLE_ADMIN", name: "Admin", symbol: "ADMIN", password_input: "12345") } + let!(:provider) do + create( + :provider, + role_name: "ROLE_ADMIN", + name: "Admin", + symbol: "ADMIN", + password_input: "12345", + ) + end context "request is valid" do - let(:params) { "grant_type=password&username=#{provider.symbol}&password=12345" } + let(:params) do + "grant_type=password&username=#{provider.symbol}&password=12345" + end it "creates a provider token" do post "/token", params: params @@ -72,7 +98,9 @@ let!(:client) { create(:client, password_input: "12345") } context "request is valid" do - let(:params) { "grant_type=password&username=#{client.symbol}&password=12345" } + let(:params) do + "grant_type=password&username=#{client.symbol}&password=12345" + end it "creates a client token" do post "/token", params: params @@ -87,8 +115,15 @@ end describe "reset", type: :request, vcr: true do - let(:provider) { create(:provider, symbol: "DATACITE", password_input: "12345") } - let!(:client) { create(:client, symbol: "DATACITE.DATACITE", password_input: "12345", provider: provider) } + let(:provider) do + create(:provider, symbol: "DATACITE", password_input: "12345") + end + let!(:client) do + create( + :client, + symbol: "DATACITE.DATACITE", password_input: "12345", provider: provider, + ) + end context "account exists" do let(:params) { "username=#{client.symbol}" } diff --git a/spec/requests/works_spec.rb b/spec/requests/works_spec.rb index 89b3e32a5..253fa4a30 100644 --- a/spec/requests/works_spec.rb +++ b/spec/requests/works_spec.rb @@ -1,19 +1,58 @@ +# frozen_string_literal: true + require "rails_helper" describe WorksController, type: :request do let(:admin) { create(:provider, symbol: "ADMIN") } - let(:admin_bearer) { Client.generate_token(role_id: "staff_admin", uid: admin.symbol, password: admin.password) } - let(:admin_headers) { { "HTTP_ACCEPT" => 
"application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + admin_bearer } } + let(:admin_bearer) do + Client.generate_token( + role_id: "staff_admin", uid: admin.symbol, password: admin.password, + ) + end + let(:admin_headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + admin_bearer, + } + end let(:provider) { create(:provider, symbol: "DATACITE") } - let(:client) { create(:client, provider: provider, symbol: ENV["MDS_USERNAME"], password: ENV["MDS_PASSWORD"]) } + let(:client) do + create( + :client, + provider: provider, + symbol: ENV["MDS_USERNAME"], + password: ENV["MDS_PASSWORD"], + ) + end let!(:prefix) { create(:prefix, uid: "10.14454") } - let!(:client_prefix) { create(:client_prefix, client: client, prefix: prefix) } - let(:bearer) { Client.generate_token(role_id: "client_admin", uid: client.symbol, provider_id: provider.symbol.downcase, client_id: client.symbol.downcase, password: client.password) } - let(:headers) { { "HTTP_ACCEPT" => "application/vnd.api+json", "HTTP_AUTHORIZATION" => "Bearer " + bearer } } + let!(:client_prefix) do + create(:client_prefix, client: client, prefix: prefix) + end + let(:bearer) do + Client.generate_token( + role_id: "client_admin", + uid: client.symbol, + provider_id: provider.symbol.downcase, + client_id: client.symbol.downcase, + password: client.password, + ) + end + let(:headers) do + { + "HTTP_ACCEPT" => "application/vnd.api+json", + "HTTP_AUTHORIZATION" => "Bearer " + bearer, + } + end describe "GET /works", elasticsearch: true do - let!(:datacite_dois) { create_list(:doi, 3, client: client, event: "publish", type: "DataciteDoi") } + let!(:datacite_dois) do + create_list( + :doi, + 3, + client: client, event: "publish", type: "DataciteDoi", + ) + end before do DataciteDoi.import @@ -30,19 +69,31 @@ end describe "GET /works/:id" do - let!(:datacite_doi) { create(:doi, client: client, event: "publish", type: "DataciteDoi") } + let!(:datacite_doi) do + create(:doi, 
client: client, event: "publish", type: "DataciteDoi") + end context "when the record exists" do it "returns the work" do get "/works/#{datacite_doi.doi}" expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "doi")).to eq(datacite_doi.doi.downcase) + expect(json.dig("data", "attributes", "doi")).to eq( + datacite_doi.doi.downcase, + ) expect(json.dig("data", "attributes", "author").length).to eq(8) - expect(json.dig("data", "attributes", "author").first).to eq("family" => "Ollomo", "given" => "Benjamin") - expect(json.dig("data", "attributes", "title")).to eq("Data from: A new malaria agent in African hominids.") - expect(json.dig("data", "attributes", "description")).to eq("Data from: A new malaria agent in African hominids.") - expect(json.dig("data", "attributes", "container-title")).to eq("Dryad Digital Repository") + expect(json.dig("data", "attributes", "author").first).to eq( + "family" => "Ollomo", "given" => "Benjamin", + ) + expect(json.dig("data", "attributes", "title")).to eq( + "Data from: A new malaria agent in African hominids.", + ) + expect(json.dig("data", "attributes", "description")).to eq( + "Data from: A new malaria agent in African hominids.", + ) + expect(json.dig("data", "attributes", "container-title")).to eq( + "Dryad Digital Repository", + ) expect(json.dig("data", "attributes", "published")).to eq("2011") end end @@ -52,7 +103,14 @@ get "/works/10.5256/xxxx", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json).to eq("errors" => [{ "status" => "404", "title" => "The resource you are looking for doesn't exist." 
}]) + expect(json).to eq( + "errors" => [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end @@ -63,7 +121,14 @@ get "/works/#{datacite_doi.doi}", params: nil, session: headers expect(last_response.status).to eq(404) - expect(json).to eq("errors" => [{ "status" => "404", "title" => "The resource you are looking for doesn't exist." }]) + expect(json).to eq( + "errors" => [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end @@ -72,7 +137,9 @@ get "/works/#{datacite_doi.doi}" expect(last_response.status).to eq(200) - expect(json.dig("data", "attributes", "doi")).to eq(datacite_doi.doi.downcase) + expect(json.dig("data", "attributes", "doi")).to eq( + datacite_doi.doi.downcase, + ) end end @@ -83,7 +150,14 @@ get "/works/#{datacite_doi.doi}" expect(last_response.status).to eq(404) - expect(json).to eq("errors" => [{ "status" => "404", "title" => "The resource you are looking for doesn't exist." 
}]) + expect(json).to eq( + "errors" => [ + { + "status" => "404", + "title" => "The resource you are looking for doesn't exist.", + }, + ], + ) end end end diff --git a/spec/routing/clients_routing_spec.rb b/spec/routing/clients_routing_spec.rb index c701e05d6..34aac8675 100644 --- a/spec/routing/clients_routing_spec.rb +++ b/spec/routing/clients_routing_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ClientsController, type: :routing do diff --git a/spec/routing/dois_routing_spec.rb b/spec/routing/dois_routing_spec.rb index 5dc9d467b..6bad3d8fc 100644 --- a/spec/routing/dois_routing_spec.rb +++ b/spec/routing/dois_routing_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe DataciteDoisController, type: :routing do diff --git a/spec/routing/media_routing_spec.rb b/spec/routing/media_routing_spec.rb index 86f6eaf03..3760024ba 100644 --- a/spec/routing/media_routing_spec.rb +++ b/spec/routing/media_routing_spec.rb @@ -1,29 +1,49 @@ +# frozen_string_literal: true + require "rails_helper" describe MediaController, type: :routing do describe "routing" do it "routes to #index" do - expect(get: "dois/1/media").to route_to("media#index", datacite_doi_id: "1") + expect(get: "dois/1/media").to route_to( + "media#index", + datacite_doi_id: "1", + ) end it "routes to #show" do - expect(get: "dois/1/media/1").to route_to("media#show", datacite_doi_id: "1", id: "1") + expect(get: "dois/1/media/1").to route_to( + "media#show", + datacite_doi_id: "1", id: "1", + ) end it "routes to #create" do - expect(post: "dois/1/media").to route_to("media#create", datacite_doi_id: "1") + expect(post: "dois/1/media").to route_to( + "media#create", + datacite_doi_id: "1", + ) end it "routes to #update via PUT" do - expect(put: "dois/1/media/1").to route_to("media#update", datacite_doi_id: "1", id: "1") + expect(put: "dois/1/media/1").to route_to( + "media#update", + datacite_doi_id: "1", id: "1", + ) end it "routes to 
#update via PATCH" do - expect(patch: "dois/1/media/1").to route_to("media#update", datacite_doi_id: "1", id: "1") + expect(patch: "dois/1/media/1").to route_to( + "media#update", + datacite_doi_id: "1", id: "1", + ) end it "routes to #destroy" do - expect(delete: "dois/1/media/1").to route_to("media#destroy", datacite_doi_id: "1", id: "1") + expect(delete: "dois/1/media/1").to route_to( + "media#destroy", + datacite_doi_id: "1", id: "1", + ) end end end diff --git a/spec/routing/metadata_routing_spec.rb b/spec/routing/metadata_routing_spec.rb index e8a851e63..cbc374074 100644 --- a/spec/routing/metadata_routing_spec.rb +++ b/spec/routing/metadata_routing_spec.rb @@ -1,29 +1,49 @@ +# frozen_string_literal: true + require "rails_helper" RSpec.describe MetadataController, type: :routing do describe "routing" do it "routes to #index" do - expect(get: "dois/1/metadata").to route_to("metadata#index", datacite_doi_id: "1") + expect(get: "dois/1/metadata").to route_to( + "metadata#index", + datacite_doi_id: "1", + ) end it "routes to #show" do - expect(get: "dois/1/metadata/1").to route_to("metadata#show", datacite_doi_id: "1", id: "1") + expect(get: "dois/1/metadata/1").to route_to( + "metadata#show", + datacite_doi_id: "1", id: "1", + ) end it "routes to #create" do - expect(post: "dois/1/metadata").to route_to("metadata#create", datacite_doi_id: "1") + expect(post: "dois/1/metadata").to route_to( + "metadata#create", + datacite_doi_id: "1", + ) end it "routes to #update via PUT" do - expect(put: "dois/1/metadata/1").to route_to("metadata#update", datacite_doi_id: "1", id: "1") + expect(put: "dois/1/metadata/1").to route_to( + "metadata#update", + datacite_doi_id: "1", id: "1", + ) end it "routes to #update via PATCH" do - expect(patch: "dois/1/metadata/1").to route_to("metadata#update", datacite_doi_id: "1", id: "1") + expect(patch: "dois/1/metadata/1").to route_to( + "metadata#update", + datacite_doi_id: "1", id: "1", + ) end it "routes to #destroy" do - 
expect(delete: "dois/1/metadata/1").to route_to("metadata#destroy", datacite_doi_id: "1", id: "1") + expect(delete: "dois/1/metadata/1").to route_to( + "metadata#destroy", + datacite_doi_id: "1", id: "1", + ) end end end diff --git a/spec/routing/prefixes_routing_spec.rb b/spec/routing/prefixes_routing_spec.rb index 3f3a0f76f..7f0837f34 100644 --- a/spec/routing/prefixes_routing_spec.rb +++ b/spec/routing/prefixes_routing_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe PrefixesController, type: :routing do diff --git a/spec/routing/providers_routing_spec.rb b/spec/routing/providers_routing_spec.rb index 7c9553d94..f7b6530e8 100644 --- a/spec/routing/providers_routing_spec.rb +++ b/spec/routing/providers_routing_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe ProvidersController, type: :routing do diff --git a/spec/routing/repositories_routing_spec.rb b/spec/routing/repositories_routing_spec.rb index c2af1a2a5..2cbdec497 100644 --- a/spec/routing/repositories_routing_spec.rb +++ b/spec/routing/repositories_routing_spec.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" describe RepositoriesController, type: :routing do @@ -19,11 +21,17 @@ end it "routes to #update via PATCH" do - expect(patch: "/repositories/1").to route_to("repositories#update", id: "1") + expect(patch: "/repositories/1").to route_to( + "repositories#update", + id: "1", + ) end it "routes to #destroy" do - expect(delete: "/repositories/1").to route_to("repositories#destroy", id: "1") + expect(delete: "/repositories/1").to route_to( + "repositories#destroy", + id: "1", + ) end end end diff --git a/spec/support/database_cleaner_helper.rb b/spec/support/database_cleaner_helper.rb index 6698cdb3f..f9d325d8f 100644 --- a/spec/support/database_cleaner_helper.rb +++ b/spec/support/database_cleaner_helper.rb @@ -1,21 +1,15 @@ +# frozen_string_literal: true + RSpec.configure do |config| - 
config.before(:suite) do - DatabaseCleaner.clean_with(:truncation) - end + config.before(:suite) { DatabaseCleaner.clean_with(:truncation) } - config.before(:each) do - DatabaseCleaner.strategy = :transaction - end + config.before(:each) { DatabaseCleaner.strategy = :transaction } config.before(:each, js: true) do DatabaseCleaner.strategy = :truncation, { pre_count: true } end - config.before(:each) do - DatabaseCleaner.start - end + config.before(:each) { DatabaseCleaner.start } - config.after(:each) do - DatabaseCleaner.clean - end + config.after(:each) { DatabaseCleaner.clean } end diff --git a/spec/support/elasticsearch_helper.rb b/spec/support/elasticsearch_helper.rb index dd22b5494..f9b09dcdd 100644 --- a/spec/support/elasticsearch_helper.rb +++ b/spec/support/elasticsearch_helper.rb @@ -1,5 +1,18 @@ +# frozen_string_literal: true + ## https://github.com/elastic/elasticsearch-ruby/issues/462 -SEARCHABLE_MODELS = [Client, Provider, DataciteDoi, OtherDoi, Doi, Event, Activity, Prefix, ClientPrefix, ProviderPrefix].freeze +SEARCHABLE_MODELS = [ + Client, + Provider, + DataciteDoi, + OtherDoi, + Doi, + Event, + Activity, + Prefix, + ClientPrefix, + ProviderPrefix, +].freeze RSpec.configure do |config| config.around :example, elasticsearch: true do |example| @@ -8,8 +21,16 @@ model.create_template end - Elasticsearch::Model.client.indices.delete index: "#{model.index_name}_v1" if Elasticsearch::Model.client.indices.exists? index: "#{model.index_name}_v1" - Elasticsearch::Model.client.indices.delete index: "#{model.index_name}_v2" if Elasticsearch::Model.client.indices.exists? index: "#{model.index_name}_v2" + if Elasticsearch::Model.client.indices.exists? index: + "#{model.index_name}_v1" + Elasticsearch::Model.client.indices.delete index: + "#{model.index_name}_v1" + end + if Elasticsearch::Model.client.indices.exists? 
index: + "#{model.index_name}_v2" + Elasticsearch::Model.client.indices.delete index: + "#{model.index_name}_v2" + end model.__elasticsearch__.create_index! force: true end @@ -17,7 +38,9 @@ example.run SEARCHABLE_MODELS.each do |model| - Elasticsearch::Model.client.indices.delete index: model.index_name if Elasticsearch::Model.client.indices.exists? index: model.index_name + if Elasticsearch::Model.client.indices.exists? index: model.index_name + Elasticsearch::Model.client.indices.delete index: model.index_name + end end end end diff --git a/spec/support/job_helper.rb b/spec/support/job_helper.rb index c46fb7e43..3f7ed5a0f 100644 --- a/spec/support/job_helper.rb +++ b/spec/support/job_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module JobHelper include ActiveJob::TestHelper end diff --git a/spec/support/request_helper.rb b/spec/support/request_helper.rb index 27f3be74f..aa195f822 100644 --- a/spec/support/request_helper.rb +++ b/spec/support/request_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + module RequestHelper # Parse JSON response to ruby hash def json diff --git a/spec/support/shared_contexts/rake.rb b/spec/support/shared_contexts/rake.rb index b7e434b0e..285291cb6 100644 --- a/spec/support/shared_contexts/rake.rb +++ b/spec/support/shared_contexts/rake.rb @@ -1,16 +1,22 @@ +# frozen_string_literal: true + # From http://robots.thoughtbot.com/post/11957424161/test-rake-tasks-like-a-boss require "rake" shared_context "rake" do - let(:rake) { Rake::Application.new } + let(:rake) { Rake::Application.new } let(:task_name) { self.class.top_level_description.split("[").first } - let(:regexp) { Regexp.new('\[([\w,]+)\]') } - let(:task_args) { regexp.match(self.class.top_level_description)[1].split(",") } + let(:regexp) { Regexp.new("\[([\w,]+)\]") } + let(:task_args) do + regexp.match(self.class.top_level_description)[1].split(",") + end let(:task_path) { "lib/tasks/#{task_name.split(':').first}" } - subject { rake[task_name] } + 
subject { rake[task_name] } def loaded_files_excluding_current_rake_file - $LOADED_FEATURES.reject { |file| file == Rails.root.join("#{task_path}.rake").to_s } + $LOADED_FEATURES.reject do |file| + file == Rails.root.join("#{task_path}.rake").to_s + end end before do diff --git a/spec/support/shared_examples_for_sti.rb b/spec/support/shared_examples_for_sti.rb index 26bb64cda..3c13c6419 100644 --- a/spec/support/shared_examples_for_sti.rb +++ b/spec/support/shared_examples_for_sti.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + require "rails_helper" shared_examples "an STI class" do diff --git a/spec/support/task_helper.rb b/spec/support/task_helper.rb index de694fd4c..cfe2a7051 100644 --- a/spec/support/task_helper.rb +++ b/spec/support/task_helper.rb @@ -1,3 +1,5 @@ +# frozen_string_literal: true + # File: spec/support/tasks.rb require "rake" @@ -15,7 +17,7 @@ module TaskExampleGroup extend ActiveSupport::Concern included do - let(:task_name) { self.class.top_level_description.sub(/\Arake /, "") } + let(:task_name) { self.class.top_level_description.delete_prefix("rake ") } let(:tasks) { Rake::Task } # Make the Rake task available as `task` in your examples: @@ -31,7 +33,5 @@ module TaskExampleGroup config.include TaskExampleGroup, type: :task - config.before(:suite) do - Rails.application.load_tasks - end + config.before(:suite) { Rails.application.load_tasks } end