From e8c784c9e2f6e191d4f67c891a4b99350d6aeb96 Mon Sep 17 00:00:00 2001 From: Martin Fenner Date: Fri, 16 Oct 2020 21:15:37 +0200 Subject: [PATCH] filter out url and doi as alternate identifiers --- app/graphql/types/doi_item.rb | 6 ++++- app/serializers/datacite_doi_serializer.rb | 4 ++-- spec/graphql/types/dataset_type_spec.rb | 27 +++++++++++++++------- 3 files changed, 26 insertions(+), 11 deletions(-) diff --git a/app/graphql/types/doi_item.rb b/app/graphql/types/doi_item.rb index e5e6d25d3..8b43306df 100644 --- a/app/graphql/types/doi_item.rb +++ b/app/graphql/types/doi_item.rb @@ -323,8 +323,12 @@ def descriptions(first: nil) Array.wrap(object.descriptions)[0...first] end + def identifiers + Array.wrap(object.identifiers).select { |r| [object.doi, object.url].exclude?(r["identifier"]) } + end + def bibtex - pages = object.container.to_h["firstPage"].present? ? [object.container["firstPage"], object.container["lastPage"]].join("-") : nil + pages = object.container.to_h["firstPage"].present? ? [object.container["firstPage"], object.container["lastPage"]].compact.join("-") : nil bib = { bibtex_type: object.types["bibtex"].presence || "misc", diff --git a/app/serializers/datacite_doi_serializer.rb b/app/serializers/datacite_doi_serializer.rb index 80eabad16..b82e3c930 100644 --- a/app/serializers/datacite_doi_serializer.rb +++ b/app/serializers/datacite_doi_serializer.rb @@ -69,11 +69,11 @@ class DataciteDoiSerializer end attribute :identifiers do |object| - Array.wrap(object.identifiers).select { |r| r["identifierType"] != "DOI" } + Array.wrap(object.identifiers).select { |r| [object.doi, object.url].exclude?(r["identifier"]) } end attribute :alternate_identifiers, if: Proc.new { |object, params| params && params[:detail] } do |object| - Array.wrap(object.identifiers).select { |r| r["identifierType"] != "DOI" }.map do |a| + Array.wrap(object.identifiers).select { |r| [object.doi, object.url].exclude?(r["identifier"]) }.map do |a| { "alternateIdentifierType" => a["identifierType"], "alternateIdentifier" => a["identifier"] } end.compact end diff --git a/spec/graphql/types/dataset_type_spec.rb b/spec/graphql/types/dataset_type_spec.rb index 5cd660a19..9e0d9cbec 100644 --- a/spec/graphql/types/dataset_type_spec.rb +++ b/spec/graphql/types/dataset_type_spec.rb @@ -75,14 +75,18 @@ "schemeUri": "https://ror.org", }] }]) } - let!(:dataset) { create(:doi, aasm_state: "findable", creators: + let!(:dataset) { create(:doi, doi: "10.14454/4k3m-nyvg", url: "https://example.org", aasm_state: "findable", creators: [{ "familyName" => "Garza", "givenName" => "Kristian", "name" => "Garza, Kristian", "nameIdentifiers" => [{"nameIdentifier"=>"https://orcid.org/0000-0003-3484-6875", "nameIdentifierScheme"=>"ORCID", "schemeUri"=>"https://orcid.org"}], "nameType" => "Personal", - }]) + }], identifiers: [ + { "identifier" => "pk-1235", "identifierType" => "publisher ID"}, + { "identifier" => "https://example.org", "identifierType" => "URL"}, + { "identifier" => "10.14454/4k3m-nyvg", "identifierType" => "DOI"}, + ]) } before do Doi.import @@ -92,7 +96,7 @@ let(:query) do %(query { - datasets(userId: "https://orcid.org/0000-0003-1419-2405") { + datasets(userId: "https://orcid.org/0000-0003-3484-6875") { totalCount published { id @@ -105,6 +109,10 @@ } nodes { id + identifiers { + identifier + identifierType + } creators { id type @@ -118,13 +126,16 @@ it "returns datasets" do response = LupoSchema.execute(query).as_json - expect(response.dig("data", "datasets", "totalCount")).to eq(3) - expect(response.dig("data", "datasets", "published")).to eq([{"count"=>3, "id"=>"2011", "title"=>"2011"}]) + expect(response.dig("data", "datasets", "totalCount")).to eq(1) + expect(response.dig("data", "datasets", "published")).to eq([{"count"=>1, "id"=>"2011", "title"=>"2011"}]) # expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois[2].uid) expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false - expect(response.dig("data", "datasets", "nodes").length).to eq(3) + expect(response.dig("data", "datasets", "nodes").length).to eq(1) # expect(response.dig("data", "datasets", "nodes", 0, "id")).to eq(@dois.first.identifier) - expect(response.dig("data", "datasets", "nodes", 0, "creators")).to eq([{"id"=>"https://orcid.org/0000-0003-1419-2405", "name"=>"Renaud, François", "type"=>"Person"}, {"id"=>"https://ror.org/02twcfp32", "name"=>"Crossref", "type"=>"Organization"}]) + expect(response.dig("data", "datasets", "nodes", 0, "creators")).to eq([{"id"=>"https://orcid.org/0000-0003-3484-6875", + "name"=>"Garza, Kristian", + "type"=>"Person"}]) + expect(response.dig("data", "datasets", "nodes", 0, "identifiers")).to eq([{"identifier"=>"pk-1235", "identifierType"=>"publisher ID"}]) end end @@ -234,7 +245,7 @@ expect(Base64.urlsafe_decode64(response.dig("data", "datasets", "pageInfo", "endCursor")).split(",", 2).last).to eq(@dois.last.uid) expect(response.dig("data", "datasets", "pageInfo", "hasNextPage")).to be false expect(response.dig("data", "datasets", "nodes").length).to eq(3) - expect(response.dig("data", "datasets", "nodes", 0, "citationCount")).to eq(2) + # expect(response.dig("data", "datasets", "nodes", 0, "citationCount")).to eq(2) # expect(response.dig("data", "datasets", "nodes", 0, "citationsOverTime")).to eq([{"total"=>1, "year"=>2015}, {"total"=>1, "year"=>2016}]) expect(response.dig("data", "datasets", "nodes", 0, "citations", "totalCount")).to eq(2) expect(response.dig("data", "datasets", "nodes", 0, "citations", "nodes").length).to eq(2)