Merge remote-tracking branch 'upstream/main' into DS-2058-7x
@@ -2,6 +2,7 @@ version: '3.7'
networks:
dspacenet:
services:
# DSpace (backend) webapp container
dspace:
container_name: dspace
image: "${DOCKER_OWNER:-dspace}/dspace:${DSPACE_VER:-dspace-7_x-test}"
@@ -33,10 +34,12 @@ services:
while (!</dev/tcp/dspacedb/5432) > /dev/null 2>&1; do sleep 1; done;
/dspace/bin/dspace database migrate
catalina.sh run
# DSpace database container
dspacedb:
container_name: dspacedb
environment:
PGDATA: /pgdata
# Uses a custom Postgres image with pgcrypto installed
image: dspace/dspace-postgres-pgcrypto
networks:
dspacenet:
@@ -47,9 +50,11 @@ services:
tty: true
volumes:
- pgdata:/pgdata
# DSpace Solr container
dspacesolr:
container_name: dspacesolr
image: dspace/dspace-solr
# Uses official Solr image at https://hub.docker.com/_/solr/
image: solr:8.8
networks:
dspacenet:
ports:
@@ -57,15 +62,27 @@ services:
target: 8983
stdin_open: true
tty: true
working_dir: /var/solr/data
volumes:
- solr_authority:/opt/solr/server/solr/authority/data
- solr_oai:/opt/solr/server/solr/oai/data
- solr_search:/opt/solr/server/solr/search/data
- solr_statistics:/opt/solr/server/solr/statistics/data
# Mount our local Solr core configs so that they are available as Solr configsets on container
- ./dspace/solr/authority:/opt/solr/server/solr/configsets/authority
- ./dspace/solr/oai:/opt/solr/server/solr/configsets/oai
- ./dspace/solr/search:/opt/solr/server/solr/configsets/search
- ./dspace/solr/statistics:/opt/solr/server/solr/configsets/statistics
# Keep Solr data directory between reboots
- solr_data:/var/solr/data
# Initialize all DSpace Solr cores using the mounted local configsets (see above), then start Solr
entrypoint:
- /bin/bash
- '-c'
- |
init-var-solr
precreate-core authority /opt/solr/server/solr/configsets/authority
precreate-core oai /opt/solr/server/solr/configsets/oai
precreate-core search /opt/solr/server/solr/configsets/search
precreate-core statistics /opt/solr/server/solr/configsets/statistics
exec solr -f
volumes:
assetstore:
pgdata:
solr_authority:
solr_oai:
solr_search:
solr_statistics:
solr_data:
@@ -376,6 +376,48 @@
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Bouncycastle brought in via solr-cell -->
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcpkix-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer version of Jetty in our parent POM & via Solr -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-alpn-java-server</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-common</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty.http2</groupId>
|
||||
<artifactId>http2-server</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- Jetty is needed to run Handle Server -->
|
||||
@@ -535,6 +577,21 @@
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-solrj</artifactId>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
|
||||
<!-- The following Solr / Lucene dependencies also support integration tests -->
|
||||
@@ -544,18 +601,12 @@
|
||||
<scope>test</scope>
|
||||
<version>${solr.client.version}</version>
|
||||
<exclusions>
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-continuation</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
@@ -564,104 +615,33 @@
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-jmx</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-rewrite</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-security</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-server</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.solr</groupId>
|
||||
<artifactId>solr-cell</artifactId>
|
||||
<exclusions>
|
||||
<!-- Newer versions provided in our parent POM -->
|
||||
<!-- Newer version brought in by opencsv -->
|
||||
<exclusion>
|
||||
<groupId>commons-cli</groupId>
|
||||
<artifactId>commons-cli</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.ow2.asm</groupId>
|
||||
<artifactId>asm-commons</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcpkix-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.bouncycastle</groupId>
|
||||
<artifactId>bcprov-jdk15on</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-xml</artifactId>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-text</artifactId>
|
||||
</exclusion>
|
||||
<!-- Newer Jetty version brought in via Parent POM -->
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-http</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlet</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-webapp</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-deploy</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-continuation</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-servlets</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-io</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.eclipse.jetty</groupId>
|
||||
<artifactId>jetty-security</artifactId>
|
||||
<artifactId>jetty-util</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
@@ -669,8 +649,11 @@
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
</dependency>
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
<!-- Used for full-text indexing with Solr -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId>
@@ -689,7 +672,6 @@
<dependency>
<groupId>org.apache.xmlbeans</groupId>
<artifactId>xmlbeans</artifactId>
<version>2.6.0</version>
</dependency>

<dependency>
@@ -8,9 +8,7 @@

/**
 * <p>SFX/OpenURL link server support.</p>
 *
 * @see org.dspace.app.webui.jsptag.SFXLinkTag
 * @see org.dspace.app.xmlui.aspect.artifactbrowser.ItemViewer
 *
 */

package org.dspace.app.sfx;
@@ -1,49 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.sherpa;

/**
 * POJO representation for a SHERPA journal
 *
 * @author Andrea Bollini
 */
public class SHERPAJournal {
    private String title;

    private String issn;

    private String zetopub;

    private String romeopub;

    public SHERPAJournal(String title, String issn, String zetopub,
                         String romeopub) {
        super();
        this.title = title;
        this.issn = issn;
        this.zetopub = zetopub;
        this.romeopub = romeopub;
    }

    public String getTitle() {
        return title;
    }

    public String getIssn() {
        return issn;
    }

    public String getZetopub() {
        return zetopub;
    }

    public String getRomeopub() {
        return romeopub;
    }

}
@@ -1,175 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* POJO representation for a SHERPA Publisher record
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
|
||||
private String id;
|
||||
|
||||
private String name;
|
||||
|
||||
private String alias;
|
||||
|
||||
private String homeurl;
|
||||
|
||||
private String prearchiving;
|
||||
|
||||
private List<String> prerestriction;
|
||||
|
||||
private String postarchiving;
|
||||
|
||||
private List<String> postrestriction;
|
||||
|
||||
private String pubarchiving;
|
||||
|
||||
private List<String> pubrestriction;
|
||||
|
||||
private List<String> condition;
|
||||
|
||||
private String paidaccessurl;
|
||||
|
||||
private String paidaccessname;
|
||||
|
||||
private String paidaccessnotes;
|
||||
|
||||
private List<String[]> copyright;
|
||||
|
||||
private String romeocolour;
|
||||
|
||||
private String dateadded;
|
||||
|
||||
private String dateupdated;
|
||||
|
||||
public SHERPAPublisher(String id, String name, String alias, String homeurl,
|
||||
String prearchiving, List<String> prerestriction,
|
||||
String postarchiving, List<String> postrestriction,
|
||||
String pubarchiving, List<String> pubrestriction,
|
||||
List<String> condition, String paidaccessurl,
|
||||
String paidaccessname, String paidaccessnotes,
|
||||
List<String[]> copyright, String romeocolour, String datedded,
|
||||
String dateupdated) {
|
||||
this.id = id;
|
||||
|
||||
this.name = name;
|
||||
|
||||
this.alias = alias;
|
||||
|
||||
this.homeurl = homeurl;
|
||||
|
||||
this.prearchiving = prearchiving;
|
||||
|
||||
this.prerestriction = prerestriction;
|
||||
|
||||
this.postarchiving = postarchiving;
|
||||
|
||||
this.postrestriction = postrestriction;
|
||||
|
||||
this.pubarchiving = pubarchiving;
|
||||
|
||||
this.pubrestriction = pubrestriction;
|
||||
|
||||
this.condition = condition;
|
||||
|
||||
this.paidaccessurl = paidaccessurl;
|
||||
|
||||
this.paidaccessname = paidaccessname;
|
||||
|
||||
this.paidaccessnotes = paidaccessnotes;
|
||||
|
||||
this.copyright = copyright;
|
||||
|
||||
this.romeocolour = romeocolour;
|
||||
|
||||
this.dateadded = datedded;
|
||||
|
||||
this.dateupdated = dateupdated;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public String getAlias() {
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String getHomeurl() {
|
||||
return homeurl;
|
||||
}
|
||||
|
||||
public String getPrearchiving() {
|
||||
return prearchiving;
|
||||
}
|
||||
|
||||
public List<String> getPrerestriction() {
|
||||
return prerestriction;
|
||||
}
|
||||
|
||||
public String getPostarchiving() {
|
||||
return postarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPostrestriction() {
|
||||
return postrestriction;
|
||||
}
|
||||
|
||||
public String getPubarchiving() {
|
||||
return pubarchiving;
|
||||
}
|
||||
|
||||
public List<String> getPubrestriction() {
|
||||
return pubrestriction;
|
||||
}
|
||||
|
||||
public List<String> getCondition() {
|
||||
return condition;
|
||||
}
|
||||
|
||||
public String getPaidaccessurl() {
|
||||
return paidaccessurl;
|
||||
}
|
||||
|
||||
public String getPaidaccessname() {
|
||||
return paidaccessname;
|
||||
}
|
||||
|
||||
public String getPaidaccessnotes() {
|
||||
return paidaccessnotes;
|
||||
}
|
||||
|
||||
public List<String[]> getCopyright() {
|
||||
return copyright;
|
||||
}
|
||||
|
||||
public String getRomeocolour() {
|
||||
return romeocolour;
|
||||
}
|
||||
|
||||
public String getDatedded() {
|
||||
return dateadded;
|
||||
}
|
||||
|
||||
public String getDateupdated() {
|
||||
return dateupdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the id
|
||||
* @return the id value of this SHERPAPublisher
|
||||
*/
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
}
|
@@ -1,206 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import javax.xml.parsers.DocumentBuilder;
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.util.XMLUtils;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
/**
|
||||
* JAVA representation for a SHERPA API Response
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public class SHERPAResponse {
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAResponse.class);
|
||||
|
||||
private int numHits;
|
||||
|
||||
private String message;
|
||||
|
||||
private String license;
|
||||
|
||||
private String licenseURL;
|
||||
|
||||
private String disclaimer;
|
||||
|
||||
private List<SHERPAJournal> journals;
|
||||
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
public SHERPAResponse(InputStream xmlData) {
|
||||
try {
|
||||
DocumentBuilderFactory factory = DocumentBuilderFactory
|
||||
.newInstance();
|
||||
factory.setValidating(false);
|
||||
factory.setIgnoringComments(true);
|
||||
factory.setIgnoringElementContentWhitespace(true);
|
||||
// disallow DTD parsing to ensure no XXE attacks can occur.
|
||||
// See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
|
||||
factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
|
||||
|
||||
DocumentBuilder db = factory.newDocumentBuilder();
|
||||
Document inDoc = db.parse(xmlData);
|
||||
|
||||
Element xmlRoot = inDoc.getDocumentElement();
|
||||
Element headersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"header");
|
||||
Element journalsElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"journals");
|
||||
Element publishersElement = XMLUtils.getSingleElement(xmlRoot,
|
||||
"publishers");
|
||||
|
||||
String numhitsString = XMLUtils.getElementValue(headersElement, "numhits");
|
||||
if (StringUtils.isNotBlank(numhitsString)) {
|
||||
numHits = Integer.parseInt(numhitsString);
|
||||
} else {
|
||||
numHits = 0;
|
||||
}
|
||||
message = XMLUtils.getElementValue(headersElement, "message");
|
||||
|
||||
license = XMLUtils.getElementValue(headersElement, "license");
|
||||
licenseURL = XMLUtils.getElementValue(headersElement, "licenseurl");
|
||||
disclaimer = XMLUtils.getElementValue(headersElement, "disclaimer");
|
||||
|
||||
List<Element> journalsList = XMLUtils.getElementList(
|
||||
journalsElement, "journal");
|
||||
List<Element> publishersList = XMLUtils.getElementList(
|
||||
publishersElement, "publisher");
|
||||
|
||||
if (journalsList != null) {
|
||||
journals = new LinkedList<SHERPAJournal>();
|
||||
for (Element journalElement : journalsList) {
|
||||
journals.add(new SHERPAJournal(
|
||||
XMLUtils.getElementValue(journalElement, "jtitle"),
|
||||
XMLUtils.getElementValue(journalElement, "issn"),
|
||||
XMLUtils.getElementValue(journalElement, "zetopub"),
|
||||
XMLUtils.getElementValue(journalElement, "romeopub")));
|
||||
}
|
||||
}
|
||||
|
||||
if (publishersList != null) {
|
||||
publishers = new LinkedList<SHERPAPublisher>();
|
||||
for (Element publisherElement : publishersList) {
|
||||
Element preprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "preprints");
|
||||
Element preprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"prerestrictions");
|
||||
|
||||
Element postprintsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "postprints");
|
||||
Element postprintsRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"postrestrictions");
|
||||
|
||||
Element pdfversionElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "pdfversion");
|
||||
Element pdfversionRestrictionElement = XMLUtils
|
||||
.getSingleElement(publisherElement,
|
||||
"pdfrestrictions");
|
||||
|
||||
Element conditionsElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "conditions");
|
||||
Element paidaccessElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "paidaccess");
|
||||
|
||||
Element copyrightlinksElement = XMLUtils.getSingleElement(
|
||||
publisherElement, "copyrightlinks");
|
||||
publishers
|
||||
.add(new SHERPAPublisher(publisherElement.getAttribute("id"), XMLUtils.getElementValue(
|
||||
publisherElement, "name"),
|
||||
XMLUtils.getElementValue(publisherElement,
|
||||
"alias"), XMLUtils.getElementValue(
|
||||
publisherElement, "homeurl"),
|
||||
|
||||
XMLUtils.getElementValue(preprintsElement,
|
||||
"prearchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
preprintsRestrictionElement,
|
||||
"prerestriction"),
|
||||
|
||||
XMLUtils.getElementValue(postprintsElement,
|
||||
"postarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
postprintsRestrictionElement,
|
||||
"postrestriction"),
|
||||
|
||||
XMLUtils.getElementValue(pdfversionElement,
|
||||
"pdfarchiving"),
|
||||
XMLUtils.getElementValueList(
|
||||
pdfversionRestrictionElement,
|
||||
"pdfrestriction"),
|
||||
|
||||
XMLUtils
|
||||
.getElementValueList(
|
||||
conditionsElement,
|
||||
"condition"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessurl"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessname"), XMLUtils
|
||||
.getElementValue(paidaccessElement,
|
||||
"paidaccessnotes"),
|
||||
XMLUtils.getElementValueArrayList(
|
||||
copyrightlinksElement,
|
||||
"copyrightlink",
|
||||
"copyrightlinktext",
|
||||
"copyrightlinkurl"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"romeocolour"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateadded"), XMLUtils
|
||||
.getElementValue(publisherElement,
|
||||
"dateupdated")));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Error parsing SHERPA API Response", e);
|
||||
}
|
||||
}
|
||||
|
||||
public SHERPAResponse(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public String getLicense() {
|
||||
return license;
|
||||
}
|
||||
|
||||
public String getLicenseURL() {
|
||||
return licenseURL;
|
||||
}
|
||||
|
||||
public String getDisclaimer() {
|
||||
return disclaimer;
|
||||
}
|
||||
|
||||
public List<SHERPAJournal> getJournals() {
|
||||
return journals;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
|
||||
public int getNumHits() {
|
||||
return numHits;
|
||||
}
|
||||
}
|
@@ -7,6 +7,13 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
@@ -16,24 +23,51 @@ import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
|
||||
* for SHERPASubmitService.
|
||||
* Note, this service is ported from DSpace 6 for the ability to search policies by ISSN
|
||||
* There are also new DataProvider implementations provided for use as 'external sources'
|
||||
* of journal and publisher data
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAService {
|
||||
private CloseableHttpClient client = null;
|
||||
|
||||
private int maxNumberOfTries;
|
||||
private long sleepBetweenTimeouts;
|
||||
private int timeout = 5000;
|
||||
private String endpoint = "https://v2.sherpa.ac.uk/cgi/retrieve";
|
||||
private String apiKey = null;
|
||||
|
||||
/** log4j category */
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAService.class);
|
||||
|
||||
@Autowired
|
||||
ConfigurationService configurationService;
|
||||
|
||||
/**
|
||||
* log4j category
|
||||
* Create a new HTTP builder with sensible defaults in constructor
|
||||
*/
|
||||
private static final Logger log = LogManager.getLogger(SHERPAService.class);
|
||||
|
||||
public SHERPAService() {
|
||||
// Set configuration service
|
||||
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
// Get endpoint and API key from configuration
|
||||
endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
// httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
|
||||
// not to hammer the SHERPA service too much.
|
||||
@@ -43,64 +77,106 @@ public class SHERPAService {
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Search the SHERPA v2 API for a journal policy data using the supplied ISSN.
|
||||
* If the API key is missing, or the HTTP response is non-OK or does not complete
|
||||
* successfully, a simple error response will be returned.
|
||||
* Otherwise, the response body will be passed to SHERPAResponse for parsing as JSON
|
||||
* and the final result returned to the calling method
|
||||
* @param query ISSN string to pass in an "issn equals" API query
|
||||
* @return SHERPAResponse containing an error or journal policies
|
||||
*/
|
||||
public SHERPAResponse searchByJournalISSN(String query) {
|
||||
ConfigurationService configurationService
|
||||
= DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
String endpoint = configurationService.getProperty("sherpa.romeo.url");
|
||||
String apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
return performRequest("publication", "issn", "equals", query, 0, 1);
|
||||
}
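As a rough usage sketch (not part of this commit; in DSpace the SHERPAService bean would normally be injected via Spring rather than constructed by hand), a caller might drive searchByJournalISSN like the following, relying only on the isError()/getMessage() accessors used elsewhere in this changeset:
// Hypothetical caller-side sketch: look up policies for one example ISSN and log any error.
SHERPAService sherpaService = new SHERPAService();   // normally a Spring-managed bean
SHERPAResponse response = sherpaService.searchByJournalISSN("0140-6736");
if (response.isError()) {
    log.warn("SHERPA/RoMEO lookup failed: " + response.getMessage());
} else {
    // Policy data on the v2 SHERPAResponse is now available for display or further processing
    log.debug("SHERPA/RoMEO lookup succeeded for ISSN 0140-6736");
}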
|
||||
|
||||
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* but the return object here must be a SHERPAPublisherResponse not the journal-centric SHERPAResponse
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAPublisherResponse object
|
||||
*/
|
||||
public SHERPAPublisherResponse performPublisherRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAPublisherResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
SHERPAPublisherResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
}
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("issn", query);
|
||||
uriBuilder.addParameter("versions", "all");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("ak", apiKey);
|
||||
}
|
||||
|
||||
method = new HttpGet(uriBuilder.build());
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
// Execute the method.
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
sherpaResponse = new SHERPAResponse(responseBody.getContent());
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse =
|
||||
new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAPublisherResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAPublisherResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
@@ -109,13 +185,218 @@ public class SHERPAService {
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAPublisherResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform an API request to the SHERPA v2 API - this could be a search or a get for any entity type
|
||||
* For more information about the type, field and predicate arguments, see the SHERPA v2 API documentation
|
||||
* @param type entity type eg "publication" or "publisher"
|
||||
* @param field field eg "issn" or "title"
|
||||
* @param predicate predicate eg "equals" or "contains-word"
|
||||
* @param value the actual value to search for (eg an ISSN or partial title)
|
||||
* @param start start / offset of search results
|
||||
* @param limit maximum search results to return
|
||||
* @return SHERPAResponse object
|
||||
*/
|
||||
public SHERPAResponse performRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
// API Key is *required* for v2 API calls
|
||||
if (null == apiKey) {
|
||||
log.error("SHERPA ROMeO API Key missing: please register for an API key and set sherpa.romeo.apikey");
|
||||
return new SHERPAResponse("SHERPA/RoMEO configuration invalid or missing");
|
||||
}
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
int numberOfTries = 0;
|
||||
|
||||
while (numberOfTries < maxNumberOfTries && sherpaResponse == null) {
|
||||
numberOfTries++;
|
||||
|
||||
log.debug(String.format(
|
||||
"Trying to contact SHERPA/RoMEO - attempt %d of %d; timeout is %d; sleep between timeouts is %d",
|
||||
numberOfTries,
|
||||
maxNumberOfTries,
|
||||
timeout,
|
||||
sleepBetweenTimeouts));
|
||||
|
||||
try {
|
||||
Thread.sleep(sleepBetweenTimeouts);
|
||||
|
||||
// Construct a default HTTP method (first result)
|
||||
method = constructHttpGet(type, field, predicate, value, start, limit);
|
||||
|
||||
// Execute the method
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
log.debug(response.getStatusLine().getStatusCode() + ": "
|
||||
+ response.getStatusLine().getReasonPhrase());
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
String errorBody = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
log.error("Error from SHERPA HTTP request: " + errorBody);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
|
||||
if (null != responseBody) {
|
||||
log.debug("Non-null SHERPA resonse received for query of " + value);
|
||||
InputStream content = null;
|
||||
try {
|
||||
content = responseBody.getContent();
|
||||
sherpaResponse = new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
} catch (IOException e) {
|
||||
log.error("Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage(), e);
|
||||
} finally {
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.debug("Empty SHERPA response body for query on " + value);
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
String errorMessage = "Error building SHERPA v2 API URI: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (IOException e) {
|
||||
String errorMessage = "Encountered exception while contacting SHERPA/RoMEO: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} catch (InterruptedException e) {
|
||||
String errorMessage = "Encountered exception while sleeping thread: " + e.getMessage();
|
||||
log.error(errorMessage, e);
|
||||
sherpaResponse = new SHERPAResponse(errorMessage);
|
||||
} finally {
|
||||
if (method != null) {
|
||||
method.releaseConnection();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
log.debug("SHERPA response is still null");
|
||||
sherpaResponse = new SHERPAResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
|
||||
// Return the final response
|
||||
return sherpaResponse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query with default start, limit
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @return HttpGet method which can then be executed by the client
|
||||
* @throws URISyntaxException if the URL build fails
|
||||
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value)
|
||||
throws URISyntaxException {
|
||||
return constructHttpGet(type, field, predicate, value, 0, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct HTTP GET object for a "field,predicate,value" query
|
||||
* eg. "title","contains-word","Lancet" or "issn","equals","1234-1234"
|
||||
* @param field the field (issn, title, etc)
|
||||
* @param predicate the predicate (contains-word, equals, etc - see API docs)
|
||||
* @param value the query value itself
|
||||
* @param start row offset
|
||||
* @param limit number of results to return
|
||||
* @return HttpGet object to be executed by the client
|
||||
* @throws URISyntaxException
|
||||
*/
|
||||
public HttpGet constructHttpGet(String type, String field, String predicate, String value, int start, int limit)
|
||||
throws URISyntaxException {
|
||||
// Sanitise the query components (type, field, predicate and value), stripping some characters
|
||||
if (null == type) {
|
||||
type = "publication";
|
||||
}
|
||||
field = SHERPAUtils.sanitiseQuery(field);
|
||||
predicate = SHERPAUtils.sanitiseQuery(predicate);
|
||||
value = SHERPAUtils.sanitiseQuery(value);
|
||||
type = SHERPAUtils.sanitiseQuery(type);
|
||||
|
||||
// Build URL based on search query
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
uriBuilder.addParameter("item-type", type);
|
||||
uriBuilder.addParameter("filter", "[[\"" + field + "\",\"" + predicate + "\",\"" + value + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
// Set optional start (offset) and limit parameters
|
||||
if (start >= 0) {
|
||||
uriBuilder.addParameter("offset", String.valueOf(start));
|
||||
}
|
||||
if (limit > 0) {
|
||||
uriBuilder.addParameter("limit", String.valueOf(limit));
|
||||
}
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
|
||||
log.debug("SHERPA API URL: " + uriBuilder.toString());
|
||||
|
||||
// Create HTTP GET object
|
||||
HttpGet method = new HttpGet(uriBuilder.build());
|
||||
|
||||
// Set connection parameters
|
||||
int timeout = 5000;
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
|
||||
return method;
|
||||
}
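For illustration (an assumed example, not output captured from this commit), calling constructHttpGet("publication", "issn", "equals", "0140-6736", 0, 1) against the default endpoint builds a GET whose URI, before URL encoding, looks roughly like https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication&filter=[["issn","equals","0140-6736"]]&format=Json&offset=0&limit=1&api-key=... and can be inspected with a small sketch such as:
// Hypothetical sketch: build the query and log the resulting URI for debugging.
SHERPAService service = new SHERPAService();   // normally Spring-managed
HttpGet get = service.constructHttpGet("publication", "issn", "equals", "0140-6736", 0, 1);
log.debug("SHERPA query URI: " + get.getURI());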
|
||||
|
||||
/**
|
||||
* Prepare the API query for execution by the HTTP client
|
||||
* @param query ISSN query string
|
||||
* @param endpoint API endpoint (base URL)
|
||||
* @param apiKey API key parameter
|
||||
* @return URI object
|
||||
* @throws URISyntaxException
|
||||
*/
|
||||
public URI prepareQuery(String query, String endpoint, String apiKey) throws URISyntaxException {
|
||||
// Sanitise query string
|
||||
query = SHERPAUtils.sanitiseQuery(query);
|
||||
|
||||
// Instantiate URI builder
|
||||
URIBuilder uriBuilder = new URIBuilder(endpoint);
|
||||
|
||||
// Build URI parameters from supplied values
|
||||
uriBuilder.addParameter("item-type", "publication");
|
||||
|
||||
// Log warning if no query is supplied
|
||||
if (null == query) {
|
||||
log.warn("No ISSN supplied as query string for SHERPA service search");
|
||||
}
|
||||
uriBuilder.addParameter("filter", "[[\"issn\",\"equals\",\"" + query + "\"]]");
|
||||
uriBuilder.addParameter("format", "Json");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("api-key", apiKey);
|
||||
}
|
||||
log.debug("Would search SHERPA endpoint with " + uriBuilder.toString());
|
||||
|
||||
// Return final built URI
|
||||
return uriBuilder.build();
|
||||
}
|
||||
|
||||
public void setMaxNumberOfTries(int maxNumberOfTries) {
|
||||
this.maxNumberOfTries = maxNumberOfTries;
|
||||
}
|
||||
|
@@ -7,49 +7,111 @@
|
||||
*/
|
||||
package org.dspace.app.sherpa.submit;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
|
||||
/**
|
||||
* SHERPASubmitService is the Spring-wired service that looks up SHERPA/RoMEO policy data
* for the ISSNs found in an item, delegating the actual API calls to SHERPAService.
* @see org.dspace.app.sherpa.SHERPAService
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPASubmitService {
|
||||
private SHERPAService sherpaService;
|
||||
|
||||
private SHERPASubmitConfigurationService configuration;
|
||||
/**
|
||||
* Spring beans for configuration and API service
|
||||
*/
|
||||
protected SHERPAService sherpaService;
|
||||
protected SHERPASubmitConfigurationService configuration;
|
||||
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPASubmitService.class);
|
||||
|
||||
/**
|
||||
* Setter for configuration (from Spring)
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param configuration
|
||||
*/
|
||||
public void setConfiguration(SHERPASubmitConfigurationService configuration) {
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter for SHERPA service, responsible for actual HTTP API calls
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param sherpaService
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
|
||||
public SHERPAResponse searchRelatedJournals(Context context, Item item) {
|
||||
/**
|
||||
* Search SHERPA for journal policies matching the ISSNs in the item.
|
||||
* Rather than a 'search' query for any/all ISSNs, the v2 API requires a separate
|
||||
* query for each ISSN found in the item. The ISSNs are extracted using the configured
|
||||
* issnItemExtractor(s) in the SHERPA spring configuration.
|
||||
* The ISSNs are not validated with a regular expression or other rules - any values
|
||||
* extracted will be included in API queries.
|
||||
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
|
||||
* @param context DSpace context
|
||||
* @param item DSpace item containing ISSNs to be checked
|
||||
* @return SHERPA v2 API response (policy data)
|
||||
*/
|
||||
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
return null;
|
||||
} else {
|
||||
return sherpaService.searchByJournalISSN(StringUtils.join(issns, ","));
|
||||
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
|
||||
Iterator<String> issnIterator = issns.iterator();
|
||||
List<SHERPAResponse> responses = new LinkedList<>();
|
||||
while (issnIterator.hasNext()) {
|
||||
String issn = issnIterator.next();
|
||||
SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
|
||||
if (response.isError()) {
|
||||
// Continue with loop
|
||||
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
|
||||
+ ": " + response.getMessage());
|
||||
}
|
||||
// Store this response, even if it has an error (useful for UI reporting)
|
||||
responses.add(response);
|
||||
}
|
||||
if (responses.isEmpty()) {
|
||||
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
|
||||
}
|
||||
return responses;
|
||||
}
|
||||
}
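A minimal caller-side sketch (assumed usage, not code from this commit; sherpaSubmitService, context, item and log are placeholder names) showing how the per-ISSN responses returned above might be consumed:
// Hypothetical usage: gather policy lookups for an item and surface errors.
List<SHERPAResponse> responses = sherpaSubmitService.searchRelatedJournals(context, item);
if (responses == null) {
    // No ISSNs could be extracted from the item's metadata
    return;
}
for (SHERPAResponse response : responses) {
    if (response.isError()) {
        log.warn("SHERPA/RoMEO lookup error: " + response.getMessage());
    }
    // Error or not, each response can still be passed to the UI layer for reporting
}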
|
||||
|
||||
/**
|
||||
* Search SHERPA for journal policies matching the passed ISSN.
|
||||
* The ISSN are not validated with a regular expression or other rules - any String
|
||||
* passed to this method will be considered an ISSN for the purposes of an API query
|
||||
* @param issn ISSN string
|
||||
* @return SHERPA v2 API response object (policy data)
|
||||
*/
|
||||
public SHERPAResponse searchRelatedJournalsByISSN(String issn) {
|
||||
return sherpaService.searchByJournalISSN(issn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Using the configured itemIssnExtractors from SHERPA configuration, extract
|
||||
* ISSNs from item metadata or authority values
|
||||
* @param context DSpace context
|
||||
* @param item Item containing metadata / authority values
|
||||
* @return Set of ISSN strings
|
||||
*/
|
||||
public Set<String> getISSNs(Context context, Item item) {
|
||||
Set<String> issns = new LinkedHashSet<String>();
|
||||
if (configuration.getIssnItemExtractors() == null) {
|
||||
@@ -68,6 +130,13 @@ public class SHERPASubmitService {
|
||||
return issns;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple boolean test that runs the getISSNs extraction method
|
||||
* to determine whether an item has any ISSNs at all
|
||||
* @param context DSpace context
|
||||
* @param item Item to test
|
||||
* @return boolean indicating presence of >=1 ISSNs
|
||||
*/
|
||||
public boolean hasISSNs(Context context, Item item) {
|
||||
Set<String> issns = getISSNs(context, item);
|
||||
if (issns == null || issns.size() == 0) {
|
||||
|
@@ -0,0 +1,111 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Journal object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this is generally structured
|
||||
* as a list in the SHERPAResponse object.
|
||||
* Each journal contains a list of publisher data and list of publishing policies as well as basic metadata
|
||||
* about the journal such as ISSNs, titles, whether it appears in DOAJ, primary publisher, etc.
|
||||
* @see SHERPAResponse
|
||||
* @see org.dspace.external.provider.impl.SHERPAv2JournalDataProvider
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAJournal {
|
||||
|
||||
private List<String> titles;
|
||||
private String url;
|
||||
private List<String> issns;
|
||||
private String romeoPub;
|
||||
private String zetoPub;
|
||||
private SHERPAPublisher publisher;
|
||||
private List<SHERPAPublisher> publishers;
|
||||
private List<SHERPAPublisherPolicy> policies;
|
||||
private Boolean inDOAJ;
|
||||
|
||||
public SHERPAJournal() {
|
||||
|
||||
}
|
||||
|
||||
public List<String> getTitles() {
|
||||
return titles;
|
||||
}
|
||||
|
||||
public void setTitles(List<String> titles) {
|
||||
this.titles = titles;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public List<String> getIssns() {
|
||||
return issns;
|
||||
}
|
||||
|
||||
public void setIssns(List<String> issns) {
|
||||
this.issns = issns;
|
||||
}
|
||||
|
||||
public String getRomeoPub() {
|
||||
return romeoPub;
|
||||
}
|
||||
|
||||
public void setRomeoPub(String romeoPub) {
|
||||
this.romeoPub = romeoPub;
|
||||
}
|
||||
|
||||
public String getZetoPub() {
|
||||
return zetoPub;
|
||||
}
|
||||
|
||||
public void setZetoPub(String zetoPub) {
|
||||
this.zetoPub = zetoPub;
|
||||
}
|
||||
|
||||
public SHERPAPublisher getPublisher() {
|
||||
return publisher;
|
||||
}
|
||||
|
||||
public void setPublisher(SHERPAPublisher publisher) {
|
||||
this.publisher = publisher;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
|
||||
public void setPublishers(List<SHERPAPublisher> publishers) {
|
||||
this.publishers = publishers;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisherPolicy> getPolicies() {
|
||||
return policies;
|
||||
}
|
||||
|
||||
public void setPolicies(List<SHERPAPublisherPolicy> policies) {
|
||||
this.policies = policies;
|
||||
}
|
||||
|
||||
public Boolean getInDOAJ() {
|
||||
return inDOAJ;
|
||||
}
|
||||
|
||||
public void setInDOAJ(Boolean inDOAJ) {
|
||||
this.inDOAJ = inDOAJ;
|
||||
}
|
||||
}
|
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Permitted Version object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for journal deposit policies, this data is contained within a publisher policy.
|
||||
* Each permitted version is for a particular article version (eg. submitted, accepted, published) and contains
|
||||
*
|
||||
* A list of general conditions / terms for deposit of this version of work
|
||||
* A list of allowed locations (eg. institutional repository, personal homepage, non-commercial repository)
|
||||
* A list of prerequisite conditions for deposit (eg. attribution, linking to published version)
|
||||
* A list of required licences for the deposited work (eg. CC-BY-NC)
|
||||
* Embargo requirements, if any
|
||||
*
|
||||
* This class also has some helper data for labels, which can be used with i18n when displaying policy information
|
||||
*
|
||||
* @see SHERPAPublisherPolicy
|
||||
*/
|
||||
public class SHERPAPermittedVersion {
|
||||
|
||||
// Version (submitted, accepted, published)
|
||||
private String articleVersion;
|
||||
|
||||
// Version label
|
||||
private String articleVersionLabel;
|
||||
|
||||
// Option number
|
||||
private int option;
|
||||
|
||||
// General conditions
|
||||
private List<String> conditions;
|
||||
// Prerequisites (eg. if required by funder)
|
||||
private List<String> prerequisites;
|
||||
// Allowed locations
|
||||
private List<String> locations;
|
||||
// Required license(s)
|
||||
private List<String> licenses;
|
||||
// Embargo
|
||||
private SHERPAEmbargo embargo;
|
||||
|
||||
protected class SHERPAEmbargo {
|
||||
String units;
|
||||
int amount;
|
||||
}
|
||||
|
||||
public String getArticleVersion() {
|
||||
return articleVersion;
|
||||
}
|
||||
|
||||
public void setArticleVersion(String articleVersion) {
|
||||
this.articleVersion = articleVersion;
|
||||
}
|
||||
|
||||
public List<String> getConditions() {
|
||||
return conditions;
|
||||
}
|
||||
|
||||
public void setConditions(List<String> conditions) {
|
||||
this.conditions = conditions;
|
||||
}
|
||||
|
||||
public List<String> getPrerequisites() {
|
||||
return prerequisites;
|
||||
}
|
||||
|
||||
public void setPrerequisites(List<String> prerequisites) {
|
||||
this.prerequisites = prerequisites;
|
||||
}
|
||||
|
||||
public List<String> getLocations() {
|
||||
return locations;
|
||||
}
|
||||
|
||||
public void setLocations(List<String> locations) {
|
||||
this.locations = locations;
|
||||
}
|
||||
|
||||
public List<String> getLicenses() {
|
||||
return licenses;
|
||||
}
|
||||
|
||||
public void setLicenses(List<String> licenses) {
|
||||
this.licenses = licenses;
|
||||
}
|
||||
|
||||
public SHERPAEmbargo getEmbargo() {
|
||||
return embargo;
|
||||
}
|
||||
|
||||
public void setEmbargo(SHERPAEmbargo embargo) {
|
||||
this.embargo = embargo;
|
||||
}
|
||||
|
||||
public int getOption() {
|
||||
return option;
|
||||
}
|
||||
|
||||
public void setOption(int option) {
|
||||
this.option = option;
|
||||
}
|
||||
|
||||
public String getArticleVersionLabel() {
|
||||
return articleVersionLabel;
|
||||
}
|
||||
|
||||
public void setArticleVersionLabel(String articleVersionLabel) {
|
||||
this.articleVersionLabel = articleVersionLabel;
|
||||
}
|
||||
}
|
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a search for SHERPA journal deposit policy, this publisher object will appear in a list of publishers
|
||||
* from the journal object, and as a single publisher member for the primary/current publisher of the journal.
|
||||
* In a search for SHERPA publisher information, this object will appear in a list of publishers from the main
|
||||
* SHERPA Publisher Response object
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPublisherResponse
|
||||
*/
|
||||
public class SHERPAPublisher {
|
||||
private String name = null;
|
||||
private String relationshipType;
|
||||
private String country;
|
||||
private String uri = null;
|
||||
private String identifier = null;
|
||||
private int publicationCount;
|
||||
|
||||
// this is not technically in the same place in SHERPA data model but it makes more sense to apply it here
|
||||
// as it is treated as a 'special case' - just for printing links to paid OA access policies
|
||||
private String paidAccessDescription;
|
||||
private String paidAccessUrl;
|
||||
|
||||
public SHERPAPublisher() {
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getRelationshipType() {
|
||||
return relationshipType;
|
||||
}
|
||||
|
||||
public void setRelationshipType(String relationshipType) {
|
||||
this.relationshipType = relationshipType;
|
||||
}
|
||||
|
||||
public String getCountry() {
|
||||
return country;
|
||||
}
|
||||
|
||||
public void setCountry(String country) {
|
||||
this.country = country;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPaidAccessDescription() {
|
||||
return paidAccessDescription;
|
||||
}
|
||||
|
||||
public void setPaidAccessDescription(String paidAccessDescription) {
|
||||
this.paidAccessDescription = paidAccessDescription;
|
||||
}
|
||||
|
||||
public String getPaidAccessUrl() {
|
||||
return paidAccessUrl;
|
||||
}
|
||||
|
||||
public void setPaidAccessUrl(String paidAccessUrl) {
|
||||
this.paidAccessUrl = paidAccessUrl;
|
||||
}
|
||||
|
||||
public String getIdentifier() {
|
||||
return identifier;
|
||||
}
|
||||
|
||||
public void setIdentifier(String identifier) {
|
||||
this.identifier = identifier;
|
||||
}
|
||||
}
|
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA Publisher Policy object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* In a SHERPA search for deposit policies, each journal contains one or more publisher policies
|
||||
* Each publisher policy contains a list of different article versions (e.g. submitted, accepted, published)
|
||||
* which hold the data about what can be done with each version.
|
||||
* This class also holds copyright URLs and other policy URLs, as well as some helper information for display
|
||||
* of overall policies in the UI (as per legacy SHERPA data)
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
* @see SHERPAPermittedVersion
|
||||
*/
|
||||
public class SHERPAPublisherPolicy {
|
||||
|
||||
private int id;
|
||||
private boolean openAccessPermitted;
|
||||
private String uri;
|
||||
private String internalMoniker;
|
||||
private List<SHERPAPermittedVersion> permittedVersions;
|
||||
private Map<String, String> urls;
|
||||
private boolean openAccessProhibited;
|
||||
private int publicationCount;
|
||||
|
||||
// The legacy "can" / "cannot" indicators
|
||||
private String preArchiving = "cannot";
|
||||
private String postArchiving = "cannot";
|
||||
private String pubArchiving = "cannot";
|
||||
|
||||
public int getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(int id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessPermitted() {
|
||||
return openAccessPermitted;
|
||||
}
|
||||
|
||||
public void setOpenAccessPermitted(boolean openAccessPermitted) {
|
||||
this.openAccessPermitted = openAccessPermitted;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public String getInternalMoniker() {
|
||||
return internalMoniker;
|
||||
}
|
||||
|
||||
public void setInternalMoniker(String internalMoniker) {
|
||||
this.internalMoniker = internalMoniker;
|
||||
}
|
||||
|
||||
public List<SHERPAPermittedVersion> getPermittedVersions() {
|
||||
return permittedVersions;
|
||||
}
|
||||
|
||||
public void setPermittedVersions(List<SHERPAPermittedVersion> permittedVersions) {
|
||||
this.permittedVersions = permittedVersions;
|
||||
}
|
||||
|
||||
public Map<String, String> getUrls() {
|
||||
return urls;
|
||||
}
|
||||
|
||||
public void setUrls(Map<String, String> urls) {
|
||||
this.urls = urls;
|
||||
}
|
||||
|
||||
public boolean isOpenAccessProhibited() {
|
||||
return openAccessProhibited;
|
||||
}
|
||||
|
||||
public void setOpenAccessProhibited(boolean openAccessProhibited) {
|
||||
this.openAccessProhibited = openAccessProhibited;
|
||||
}
|
||||
|
||||
public int getPublicationCount() {
|
||||
return publicationCount;
|
||||
}
|
||||
|
||||
public void setPublicationCount(int publicationCount) {
|
||||
this.publicationCount = publicationCount;
|
||||
}
|
||||
|
||||
public String getPreArchiving() {
|
||||
return preArchiving;
|
||||
}
|
||||
|
||||
public void setPreArchiving(String preArchiving) {
|
||||
this.preArchiving = preArchiving;
|
||||
}
|
||||
|
||||
public String getPostArchiving() {
|
||||
return postArchiving;
|
||||
}
|
||||
|
||||
public void setPostArchiving(String postArchiving) {
|
||||
this.postArchiving = postArchiving;
|
||||
}
|
||||
|
||||
public String getPubArchiving() {
|
||||
return pubArchiving;
|
||||
}
|
||||
|
||||
public void setPubArchiving(String pubArchiving) {
|
||||
this.pubArchiving = pubArchiving;
|
||||
}
|
||||
}
|
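A short sketch of how a populated policy is typically read (illustrative only, not part of this commit; "policy" is assumed to be a SHERPAPublisherPolicy built by the SHERPAResponse parsing code further below):

// Illustrative sketch: reading a parsed SHERPAPublisherPolicy ("policy" is assumed to be in scope)
for (SHERPAPermittedVersion version : policy.getPermittedVersions()) {
    System.out.println(version.getArticleVersionLabel() + " (option " + version.getOption() + ")");
}
// The legacy indicators summarise the same information per article version
System.out.println("preprint archiving: " + policy.getPreArchiving());    // "can" or "cannot"
System.out.println("postprint archiving: " + policy.getPostArchiving());
System.out.println("publisher version archiving: " + policy.getPubArchiving());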
@@ -0,0 +1,223 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
|
||||
/**
|
||||
* Model class for the SHERPAv2 API (JSON) response for a publisher search
|
||||
* The structure and approach used are quite different from the simple v1 API used previously
|
||||
*
|
||||
* @see SHERPAPublisher
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*
|
||||
*/
|
||||
public class SHERPAPublisherResponse {
|
||||
// Is this response to be treated as an error?
|
||||
private boolean error;
|
||||
|
||||
// Error message
|
||||
private String message;
|
||||
|
||||
// Parsed system metadata from search results
|
||||
private SHERPASystemMetadata metadata;
|
||||
|
||||
// List of parsed publisher results
|
||||
private List<SHERPAPublisher> publishers;
|
||||
|
||||
// Internal Sherpa ID
|
||||
private int id;
|
||||
|
||||
// SHERPA URI (the human page version of this API response)
|
||||
private String uri;
|
||||
|
||||
// Format enum - currently only JSON is supported
|
||||
public enum SHERPAFormat {
|
||||
JSON, XML
|
||||
};
|
||||
|
||||
private static Logger log = Logger.getLogger(SHERPAPublisherResponse.class);
|
||||
|
||||
/**
|
||||
* Parse SHERPA v2 API for a given format
|
||||
* @param input - input stream from the HTTP response content
|
||||
* @param format - requested format
|
||||
* @throws IOException
|
||||
*/
|
||||
public SHERPAPublisherResponse(InputStream input, SHERPAFormat format) throws IOException {
|
||||
if (format == SHERPAFormat.JSON) {
|
||||
parseJSON(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the SHERPA v2 API JSON and construct simple list of publisher objects
|
||||
* This method does not return a value, but rather populates the metadata and publishers objects
|
||||
* with data parsed from the JSON.
|
||||
* @param jsonData - the JSON input stream from the API result response body
|
||||
*/
|
||||
private void parseJSON(InputStream jsonData) throws IOException {
|
||||
InputStreamReader streamReader = new InputStreamReader(jsonData);
|
||||
JSONTokener jsonTokener = new JSONTokener(streamReader);
|
||||
JSONObject httpResponse;
|
||||
try {
|
||||
httpResponse = new JSONObject(jsonTokener);
|
||||
if (httpResponse.has("items")) {
|
||||
JSONArray items = httpResponse.getJSONArray("items");
|
||||
|
||||
// the items array in this context holds publisher results - parsing is simpler than
|
||||
// parsing the full journal / policy responses
|
||||
if (items.length() > 0) {
|
||||
metadata = new SHERPASystemMetadata();
|
||||
this.publishers = new LinkedList<>();
|
||||
// Iterate search result items
|
||||
for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
|
||||
SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
|
||||
|
||||
JSONObject item = items.getJSONObject(itemIndex);
|
||||
|
||||
// Parse system metadata (per-item / result information)
|
||||
if (item.has("system_metadata")) {
|
||||
JSONObject systemMetadata = item.getJSONObject("system_metadata");
|
||||
metadata = parseSystemMetadata(systemMetadata);
|
||||
if (metadata.getId() >= 0) {
|
||||
// Set publisher identifier to be the internal SHERPA ID
|
||||
// eg. '30' (Elsevier)
|
||||
sherpaPublisher.setIdentifier(String.valueOf(metadata.getId()));
|
||||
}
|
||||
}
|
||||
|
||||
// Set publisher name
|
||||
sherpaPublisher.setName(parsePublisherName(item));
|
||||
|
||||
// Set publisher URL
|
||||
sherpaPublisher.setUri(parsePublisherURL(item));
|
||||
|
||||
this.publishers.add(sherpaPublisher);
|
||||
}
|
||||
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
|
||||
} catch (JSONException e) {
|
||||
log.error("Failed to parse SHERPA response", e);
|
||||
error = true;
|
||||
} finally {
|
||||
streamReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse system metadata and return populated SHERPASystemMetadata object
|
||||
* @param systemMetadata
|
||||
*/
|
||||
private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {
|
||||
|
||||
SHERPASystemMetadata metadata = new SHERPASystemMetadata();
|
||||
|
||||
if (systemMetadata.has("uri")) {
|
||||
this.uri = systemMetadata.getString("uri");
|
||||
metadata.setUri(this.uri);
|
||||
} else {
|
||||
log.error("SHERPA URI missing for API response item");
|
||||
}
|
||||
if (systemMetadata.has("id")) {
|
||||
this.id = systemMetadata.getInt("id");
|
||||
metadata.setId(this.id);
|
||||
} else {
|
||||
log.error("SHERPA internal ID missing for API response item");
|
||||
}
|
||||
// Get date created and added - DSpace expects this in the publisher object, though
|
||||
if (systemMetadata.has("date_created")) {
|
||||
metadata.setDateCreated(systemMetadata.getString("date_created"));
|
||||
}
|
||||
if (systemMetadata.has("date_modified")) {
|
||||
metadata.setDateModified(systemMetadata.getString("date_modified"));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse publisher array and return the first name string found
|
||||
* @param publisher - publisher JSON object (containing a "name" array)
|
||||
* @return first publisher name found (trimmed String)
|
||||
*/
|
||||
private String parsePublisherName(JSONObject publisher) {
|
||||
String name = null;
|
||||
if (publisher.has("name")) {
|
||||
JSONArray publisherNames = publisher.getJSONArray("name");
|
||||
if (publisherNames.length() > 0) {
|
||||
JSONObject publisherName = publisherNames.getJSONObject(0);
|
||||
if (publisherName.has("name")) {
|
||||
name = publisherName.getString("name").trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse publisher URL from the json data
|
||||
* @param publisher - publisher object (from JSON array)
|
||||
* @return publisher URL as string
|
||||
*/
|
||||
private String parsePublisherURL(JSONObject publisher) {
|
||||
if (publisher.has("url")) {
|
||||
return publisher.getString("url");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new response object to be handled as an error
|
||||
* @param message - the message to render in logs or error pages
|
||||
*/
|
||||
public SHERPAPublisherResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public SHERPASystemMetadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
|
||||
public List<SHERPAPublisher> getPublishers() {
|
||||
return publishers;
|
||||
}
|
||||
}
|
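A rough usage sketch for the class above (illustrative only, not part of this commit; the file name is a placeholder for any InputStream carrying a SHERPA v2 publisher search result):

import java.io.FileInputStream;
import java.io.InputStream;

import org.dspace.app.sherpa.v2.SHERPAPublisher;
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;

public class PublisherResponseExample {
    public static void main(String[] args) throws Exception {
        // A saved SHERPA v2 publisher search response (placeholder file name)
        try (InputStream json = new FileInputStream("publisher-response.json")) {
            SHERPAPublisherResponse response =
                new SHERPAPublisherResponse(json, SHERPAPublisherResponse.SHERPAFormat.JSON);
            if (response.isError()) {
                System.err.println("SHERPA error: " + response.getMessage());
            } else {
                for (SHERPAPublisher publisher : response.getPublishers()) {
                    System.out.println(publisher.getName() + " -> " + publisher.getUri());
                }
            }
        }
    }
}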
@@ -0,0 +1,557 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.dspace.core.I18nUtil;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONException;
|
||||
import org.json.JSONObject;
|
||||
import org.json.JSONTokener;
|
||||
|
||||
/**
|
||||
* Model class for the SHERPAv2 API (JSON) response for a publication (journal) search
|
||||
* The structure and approach used are quite different from the simple v1 API used previously
|
||||
* The structure is based on journal data, which in turn contains data about publishers and policies
|
||||
*
|
||||
* @see SHERPAJournal
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*
|
||||
*/
|
||||
public class SHERPAResponse {
|
||||
// Is this response to be treated as an error?
|
||||
private boolean error;
|
||||
|
||||
// Error message
|
||||
private String message;
|
||||
|
||||
// Parsed system metadata from search results
|
||||
private SHERPASystemMetadata metadata;
|
||||
|
||||
// List of parsed journal results
|
||||
private List<SHERPAJournal> journals;
|
||||
|
||||
// Internal Sherpa ID
|
||||
private int id;
|
||||
|
||||
// SHERPA URI (the human page version of this API response)
|
||||
private String uri;
|
||||
|
||||
// Format enum - currently only JSON is supported
|
||||
public enum SHERPAFormat {
|
||||
JSON, XML
|
||||
};
|
||||
|
||||
private static Logger log = Logger.getLogger(SHERPAResponse.class);
|
||||
|
||||
/**
|
||||
* Parse SHERPA v2 API for a given format
|
||||
* @param input - input stream from the HTTP response content
|
||||
* @param format - requested format
|
||||
* @throws IOException
|
||||
*/
|
||||
public SHERPAResponse(InputStream input, SHERPAFormat format) throws IOException {
|
||||
if (format == SHERPAFormat.JSON) {
|
||||
parseJSON(input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the SHERPA v2 API JSON and construct Romeo policy data for display
|
||||
* This method does not return a value, but rather populates the metadata and journals objects
|
||||
* with data parsed from the JSON.
|
||||
* @param jsonData - the JSON input stream from the API result response body
|
||||
*/
|
||||
private void parseJSON(InputStream jsonData) throws IOException {
|
||||
InputStreamReader streamReader = new InputStreamReader(jsonData);
|
||||
JSONTokener jsonTokener = new JSONTokener(streamReader);
|
||||
JSONObject httpResponse;
|
||||
try {
|
||||
httpResponse = new JSONObject(jsonTokener);
|
||||
if (httpResponse.has("items")) {
|
||||
JSONArray items = httpResponse.getJSONArray("items");
|
||||
|
||||
// items array is search results, *not* journals or publishers - they are listed for each item
|
||||
// - however, we only ever want one result since we're passing an "equals ISSN" query
|
||||
if (items.length() > 0) {
|
||||
metadata = new SHERPASystemMetadata();
|
||||
this.journals = new LinkedList<>();
|
||||
// Iterate search result items
|
||||
for (int itemIndex = 0; itemIndex < items.length(); itemIndex++) {
|
||||
List<SHERPAPublisher> sherpaPublishers = new LinkedList<>();
|
||||
List<SHERPAPublisherPolicy> policies = new ArrayList<>();
|
||||
SHERPAPublisher sherpaPublisher = new SHERPAPublisher();
|
||||
SHERPAJournal sherpaJournal = new SHERPAJournal();
|
||||
|
||||
JSONObject item = items.getJSONObject(0);
|
||||
|
||||
// Parse system metadata (per-item / result information)
|
||||
if (item.has("system_metadata")) {
|
||||
JSONObject systemMetadata = item.getJSONObject("system_metadata");
|
||||
metadata = parseSystemMetadata(systemMetadata);
|
||||
}
|
||||
|
||||
// Parse "publisher policy"
|
||||
// note - most of the information that was previously under 'publisher' is now under here
|
||||
if (item.has("publisher_policy")) {
|
||||
|
||||
// Parse main publisher policies node
|
||||
JSONArray publisherPolicies = item.getJSONArray("publisher_policy");
|
||||
for (int i = 0; i < publisherPolicies.length(); i++) {
|
||||
|
||||
JSONObject policy = publisherPolicies.getJSONObject(i);
|
||||
|
||||
// Special case - quickly check the policy for the 'paid access' option
|
||||
// and continue if found, then parse the rest of the policy
|
||||
String moniker = null;
|
||||
if (policy.has("internal_moniker")) {
|
||||
moniker = policy.getString("internal_moniker");
|
||||
}
|
||||
// Usually these are the policies for the journal proper,
|
||||
// and then an "Open access option" which contains some of the info
|
||||
// that the 'paidaccess' node in the old API used to contain
|
||||
// Look for: internal_moniker = "Open access option"
|
||||
// Check if this is OA options (Paid Access) or not
|
||||
if ("Open access option".equalsIgnoreCase(moniker)) {
|
||||
log.debug("This is the Open access options policy - a special case");
|
||||
if (policy.has("urls")) {
|
||||
JSONArray urls = policy.getJSONArray("urls");
|
||||
for (int u = 0; u < urls.length(); u++) {
|
||||
JSONObject url = urls.getJSONObject(u);
|
||||
if (url.has("description") &&
|
||||
"Open Access".equalsIgnoreCase(url.getString("description"))) {
|
||||
log.debug("Found OA paid access url: " + url.getString("url"));
|
||||
sherpaPublisher.setPaidAccessDescription(url.getString("description"));
|
||||
sherpaPublisher.setPaidAccessUrl(url.getString("url"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Continue the loop here - this "policy" is a bit different and we
|
||||
// don't want to add irrelevant conditions to the policy
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse the main publisher policy object and add to the list
|
||||
SHERPAPublisherPolicy sherpaPublisherPolicy = parsePublisherPolicy(policy);
|
||||
policies.add(sherpaPublisherPolicy);
|
||||
}
|
||||
|
||||
// set publisher name - note we're only looking for the first name here
|
||||
// as per previous functionality (for simple display)
|
||||
if (item.has("publishers")) {
|
||||
JSONArray publishers = item.getJSONArray("publishers");
|
||||
if (publishers.length() > 0) {
|
||||
JSONObject publisherElement = publishers.getJSONObject(0);
|
||||
if (publisherElement.has("publisher")) {
|
||||
JSONObject publisher = publisherElement.getJSONObject("publisher");
|
||||
sherpaPublisher.setName(parsePublisherName(publisher));
|
||||
sherpaPublisher.setUri(parsePublisherURL(publisher));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse journal data
|
||||
sherpaJournal = parseJournal(item, sherpaPublisher.getName());
|
||||
}
|
||||
|
||||
sherpaPublishers.add(sherpaPublisher);
|
||||
sherpaJournal.setPublisher(sherpaPublisher);
|
||||
sherpaJournal.setPublishers(sherpaPublishers);
|
||||
sherpaJournal.setPolicies(policies);
|
||||
this.journals.add(sherpaJournal);
|
||||
}
|
||||
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
} else {
|
||||
error = true;
|
||||
message = "No results found";
|
||||
}
|
||||
|
||||
} catch (JSONException e) {
|
||||
log.error("Failed to parse SHERPA response", e);
|
||||
error = true;
|
||||
} finally {
|
||||
streamReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse system metadata and return populated SHERPASystemMetadata object
|
||||
* @param systemMetadata
|
||||
*/
|
||||
private SHERPASystemMetadata parseSystemMetadata(JSONObject systemMetadata) {
|
||||
|
||||
SHERPASystemMetadata metadata = new SHERPASystemMetadata();
|
||||
|
||||
if (systemMetadata.has("uri")) {
|
||||
this.uri = systemMetadata.getString("uri");
|
||||
metadata.setUri(this.uri);
|
||||
} else {
|
||||
log.error("SHERPA URI missing for API response item");
|
||||
}
|
||||
if (systemMetadata.has("id")) {
|
||||
this.id = systemMetadata.getInt("id");
|
||||
metadata.setId(this.id);
|
||||
} else {
|
||||
log.error("SHERPA internal ID missing for API response item");
|
||||
}
|
||||
// Get date created and added - DSpace expects this in the publisher object, though
|
||||
if (systemMetadata.has("date_created")) {
|
||||
metadata.setDateCreated(systemMetadata.getString("date_created"));
|
||||
}
|
||||
if (systemMetadata.has("date_modified")) {
|
||||
metadata.setDateModified(systemMetadata.getString("date_modified"));
|
||||
}
|
||||
// Is this item publicly visible?
|
||||
if (systemMetadata.has("publicly_visible")) {
|
||||
metadata.setPubliclyVisible ("yes".equals(systemMetadata
|
||||
.getString("publicly_visible")));
|
||||
}
|
||||
// Is this item listed in the DOAJ?
|
||||
if (systemMetadata.has("listed_in_doaj")) {
|
||||
metadata.setPubliclyVisible ("yes".equals(systemMetadata
|
||||
.getString("listed_in_doaj")));
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse journal JSON data and return populated bean
|
||||
* This method also takes publisherName as a string to help construct some
|
||||
* legacy labels
|
||||
* @param item - the main result item JSON (which is the closest thing to an actual 'journal')
|
||||
* @param publisherName - the parsed publisher name
|
||||
* @return populated SHERPAJournal object
|
||||
*/
|
||||
private SHERPAJournal parseJournal(JSONObject item, String publisherName) {
|
||||
|
||||
SHERPAJournal sherpaJournal = new SHERPAJournal();
|
||||
|
||||
// set journal title
|
||||
if (item.has("title")) {
|
||||
JSONArray titles = item.getJSONArray("title");
|
||||
if (titles.length() > 0) {
|
||||
List<String> titleList = new ArrayList<>();
|
||||
for (int t = 0; t < titles.length(); t++) {
|
||||
JSONObject title = titles.getJSONObject(t);
|
||||
if (title.has("title")) {
|
||||
titleList.add(title.getString("title").trim());
|
||||
}
|
||||
}
|
||||
sherpaJournal.setTitles(titleList);
|
||||
if (titleList.size() > 0) {
|
||||
// Faking this a bit based on what I'd seen - not in the API v2 data
|
||||
sherpaJournal.setRomeoPub(publisherName + ": "
|
||||
+ titleList.get(0));
|
||||
sherpaJournal.setZetoPub(publisherName + ": "
|
||||
+ titleList.get(0));
|
||||
log.debug("Found journal title: " + titleList.get(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Journal URL
|
||||
if (item.has("url")) {
|
||||
sherpaJournal.setUrl(item.getString("url"));
|
||||
}
|
||||
|
||||
// set ISSNs
|
||||
if (item.has("issns")) {
|
||||
JSONArray issns = item.getJSONArray("issns");
|
||||
// collect the ISSNs (display typically uses only the first, as the DSpace data model allows for one)
|
||||
List<String> issnList = new ArrayList<>();
|
||||
for (int ii = 0; ii < issns.length(); ii++) {
|
||||
JSONObject issn = issns.getJSONObject(ii);
|
||||
issnList.add(issn.getString("issn").trim());
|
||||
}
|
||||
sherpaJournal.setIssns(issnList);
|
||||
}
|
||||
|
||||
// Is the item in DOAJ?
|
||||
if (item.has("listed_in_doaj")) {
|
||||
sherpaJournal.setInDOAJ(("yes".equals(item.getString("listed_in_doaj"))));
|
||||
}
|
||||
|
||||
return sherpaJournal;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a publisher_policy JSON data and return a populated bean
|
||||
* @param policy - each publisher policy node in the JSON array
|
||||
* @return populated SHERPAPublisherPolicy object
|
||||
*/
|
||||
private SHERPAPublisherPolicy parsePublisherPolicy(JSONObject policy) {
|
||||
|
||||
SHERPAPublisherPolicy sherpaPublisherPolicy = new SHERPAPublisherPolicy();
|
||||
|
||||
// Get and set monikers
|
||||
String moniker = null;
|
||||
if (policy.has("internal_moniker")) {
|
||||
moniker = policy.getString("internal_moniker");
|
||||
sherpaPublisherPolicy.setInternalMoniker(moniker);
|
||||
}
|
||||
|
||||
// URLs (used to be Copyright Links)
|
||||
if (policy.has("urls")) {
|
||||
JSONArray urls = policy.getJSONArray("urls");
|
||||
Map<String, String> copyrightLinks = new TreeMap<>();
|
||||
for (int u = 0; u < urls.length(); u++) {
|
||||
JSONObject url = urls.getJSONObject(u);
|
||||
if (url.has("description") && url.has("url")) {
|
||||
log.debug("Setting copyright URL: " + url.getString("url"));
|
||||
copyrightLinks.put(url.getString("url"), url.getString("description"));
|
||||
}
|
||||
}
|
||||
sherpaPublisherPolicy.setUrls(copyrightLinks);
|
||||
}
|
||||
|
||||
// Permitted OA options
|
||||
int submittedOption = 0;
|
||||
int acceptedOption = 0;
|
||||
int publishedOption = 0;
|
||||
int currentOption = 0;
|
||||
if (policy.has("permitted_oa")) {
|
||||
List<String> allowed = new ArrayList<>();
|
||||
JSONArray permittedOA = policy.getJSONArray("permitted_oa");
|
||||
List<SHERPAPermittedVersion> permittedVersions = new ArrayList<>();
|
||||
|
||||
// Iterate each permitted OA version / option. The permitted_oa node is also known as a 'pathway' --
|
||||
// essentially "a way to get a work into a repository". Each pathway could refer to one article version
|
||||
// like a pre-print, or multiple versions might have the same acceptable locations and conditions.
|
||||
// As described below, where multiple versions are referenced in a single permitted_oa pathway, they will
|
||||
// be split out and treated separately. This keeps processing simple, especially later in display or
|
||||
// compliance checking when it is preferred to group / indicate rules by the article version
|
||||
for (int p = 0; p < permittedOA.length(); p++) {
|
||||
JSONObject permitted = permittedOA.getJSONObject(p);
|
||||
// Although it adds redundancy, we will treat each 'article version' within
|
||||
// the permitted_oa ("pathway") node as a separate version altogether to keep the rest of our display
|
||||
// handled nicely. This was confirmed as an appropriate approach by JISC
|
||||
if (permitted.has("article_version")) {
|
||||
JSONArray versions = permitted.getJSONArray("article_version");
|
||||
for (int v = 0; v < versions.length(); v++) {
|
||||
// Parse this permitted_oa node but specifically looking for the article_version 'v'
|
||||
SHERPAPermittedVersion permittedVersion = parsePermittedVersion(permitted, v);
|
||||
|
||||
// To determine which option number this is, inspect the article version and increment the matching counter
|
||||
allowed.add(permittedVersion.getArticleVersion());
|
||||
if ("submitted".equals(permittedVersion.getArticleVersion())) {
|
||||
submittedOption++;
|
||||
currentOption = submittedOption;
|
||||
} else if ("accepted".equals(permittedVersion.getArticleVersion())) {
|
||||
acceptedOption++;
|
||||
currentOption = acceptedOption;
|
||||
} else if ("published".equals(permittedVersion.getArticleVersion())) {
|
||||
publishedOption++;
|
||||
currentOption = publishedOption;
|
||||
}
|
||||
permittedVersion.setOption(currentOption);
|
||||
permittedVersions.add(permittedVersion);
|
||||
}
|
||||
}
|
||||
|
||||
// Populate the old indicators into the publisher policy object
|
||||
if (allowed.contains("submitted")) {
|
||||
sherpaPublisherPolicy.setPreArchiving("can");
|
||||
}
|
||||
if (allowed.contains("accepted")) {
|
||||
sherpaPublisherPolicy.setPostArchiving("can");
|
||||
}
|
||||
if (allowed.contains("published")) {
|
||||
sherpaPublisherPolicy.setPubArchiving("can");
|
||||
}
|
||||
|
||||
}
|
||||
sherpaPublisherPolicy.setPermittedVersions(permittedVersions);
|
||||
}
|
||||
|
||||
return sherpaPublisherPolicy;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse permitted version JSON and populate new bean from the data
|
||||
* @param permitted - each 'permitted_oa' node in the JSON array
|
||||
* @return populated SHERPAPermittedVersion object
|
||||
*/
|
||||
private SHERPAPermittedVersion parsePermittedVersion(JSONObject permitted, int index) {
|
||||
|
||||
SHERPAPermittedVersion permittedVersion = new SHERPAPermittedVersion();
|
||||
|
||||
// Get the article version, which is ultimately used for the ticks / crosses
|
||||
// in the UI display. My assumptions around translation:
|
||||
// submitted = preprint
|
||||
// accepted = postprint
|
||||
// published = pdfversion
|
||||
// These strings can be used to construct i18n messages.
|
||||
String articleVersion = "unknown";
|
||||
String versionLabel = "Unknown";
|
||||
|
||||
// Each 'permitted OA' can actually refer to multiple versions
|
||||
if (permitted.has("article_version")) {
|
||||
JSONArray versions = permitted.getJSONArray("article_version");
|
||||
|
||||
// Get one particular article version to return as a PermittedVersion. The outer loop calling this
|
||||
// is iterating all permitted_oa and permitted_oa->article_version array members
|
||||
articleVersion = versions.getString(index);
|
||||
permittedVersion.setArticleVersion(articleVersion);
|
||||
log.debug("Added allowed version: " + articleVersion + " to list");
|
||||
}
|
||||
|
||||
// Add labels for this particular article version
|
||||
if ("submitted".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.submitted-version-label");
|
||||
} else if ("accepted".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.accepted-version-label");
|
||||
} else if ("published".equals(articleVersion)) {
|
||||
versionLabel = I18nUtil.getMessage("jsp.sherpa.published-version-label");
|
||||
}
|
||||
// Set the article version label based on the i18n text set above
|
||||
permittedVersion.setArticleVersionLabel(versionLabel);
|
||||
|
||||
// These are now child arrays; in the old API they were explicit nodes like
|
||||
// "preprint restrictions", etc., and just contained text rather than data
|
||||
if (permitted.has("conditions")) {
|
||||
List<String> conditionList = new ArrayList<>();
|
||||
JSONArray conditions = permitted.getJSONArray("conditions");
|
||||
for (int c = 0; c < conditions.length(); c++) {
|
||||
conditionList.add(conditions.getString(c).trim());
|
||||
}
|
||||
permittedVersion.setConditions(conditionList);
|
||||
}
|
||||
|
||||
// Any prerequisites for this option (eg required by funder)
|
||||
List<String> prerequisites = new ArrayList<>();
|
||||
if (permitted.has("prerequisites")) {
|
||||
JSONObject prereqs = permitted.getJSONObject("prerequisites");
|
||||
if (prereqs.has("prerequisites_phrases")) {
|
||||
JSONArray phrases = prereqs.getJSONArray("prerequisites_phrases");
|
||||
for (int pp = 0; pp < phrases.length(); pp++) {
|
||||
JSONObject phrase = phrases.getJSONObject(pp);
|
||||
if (phrase.has("phrase")) {
|
||||
prerequisites.add(phrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setPrerequisites(prerequisites);
|
||||
|
||||
// Locations where this version / option may be archived
|
||||
List<String> sherpaLocations = new ArrayList<>();
|
||||
if (permitted.has("location")) {
|
||||
JSONObject locations = permitted.getJSONObject("location");
|
||||
if (locations.has("location_phrases")) {
|
||||
JSONArray locationPhrases = locations.getJSONArray("location_phrases");
|
||||
if (locationPhrases.length() > 0) {
|
||||
for (int l = 0; l < locationPhrases.length(); l++) {
|
||||
JSONObject locationPhrase = locationPhrases.getJSONObject(l);
|
||||
if (locationPhrase.has("phrase")) {
|
||||
sherpaLocations.add(locationPhrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setLocations(sherpaLocations);
|
||||
|
||||
List<String> sherpaLicenses = new ArrayList<>();
|
||||
// required licences
|
||||
if (permitted.has("license")) {
|
||||
JSONArray licences = permitted.getJSONArray("license");
|
||||
for (int l = 0; l < licences.length(); l++) {
|
||||
JSONObject licence = licences.getJSONObject(l);
|
||||
if (licence.has("license_phrases")) {
|
||||
JSONArray phrases = licence.getJSONArray("license_phrases");
|
||||
for (int ll = 0; ll < phrases.length(); ll++) {
|
||||
JSONObject phrase = phrases.getJSONObject(ll);
|
||||
if (phrase.has("phrase")) {
|
||||
sherpaLicenses.add(phrase.getString("phrase").trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
permittedVersion.setLicenses(sherpaLicenses);
|
||||
|
||||
return permittedVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse publisher array and return the first name string found
|
||||
* @param publisher - publisher JSON object (containing a "name" array)
|
||||
* @return first publisher name found (trimmed String)
|
||||
*/
|
||||
private String parsePublisherName(JSONObject publisher) {
|
||||
String name = null;
|
||||
if (publisher.has("name")) {
|
||||
JSONArray publisherNames = publisher.getJSONArray("name");
|
||||
if (publisherNames.length() > 0) {
|
||||
JSONObject publisherName = publisherNames.getJSONObject(0);
|
||||
if (publisherName.has("name")) {
|
||||
name = publisherName.getString("name").trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse publisher URL from the json data
|
||||
* @param publisher - publisher object (from JSON array)
|
||||
* @return publisher URL as string
|
||||
*/
|
||||
private String parsePublisherURL(JSONObject publisher) {
|
||||
if (publisher.has("url")) {
|
||||
return publisher.getString("url");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create new response object to be handled as an error
|
||||
* @param message - the message to render in logs or error pages
|
||||
*/
|
||||
public SHERPAResponse(String message) {
|
||||
this.message = message;
|
||||
this.error = true;
|
||||
}
|
||||
|
||||
public boolean isError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public List<SHERPAJournal> getJournals() {
|
||||
return journals;
|
||||
}
|
||||
|
||||
public SHERPASystemMetadata getMetadata() {
|
||||
return metadata;
|
||||
}
|
||||
}
|
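A brief sketch of walking the structure that parseJSON builds (illustrative only; "json" is an assumed InputStream holding a journal search result, and the usual getters on SHERPAJournal and SHERPAPermittedVersion are assumed to exist alongside the setters used above):

SHERPAResponse response = new SHERPAResponse(json, SHERPAResponse.SHERPAFormat.JSON);
if (!response.isError()) {
    for (SHERPAJournal journal : response.getJournals()) {
        for (SHERPAPublisherPolicy policy : journal.getPolicies()) {
            for (SHERPAPermittedVersion version : policy.getPermittedVersions()) {
                System.out.println(version.getArticleVersion()
                    + " may be deposited in: " + version.getLocations());
            }
        }
    }
}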
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
|
||||
*
|
||||
* This data is included in both journal deposit policy and publisher searches and contains basic metadata
|
||||
* about the SHERPA record and API response, eg. creation and modification dates, internal IDs, permissions, etc.
|
||||
*
|
||||
* @see SHERPAResponse
|
||||
* @see SHERPAPublisherResponse
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPASystemMetadata {
|
||||
|
||||
private int id;
|
||||
private String uri;
|
||||
private String dateCreated;
|
||||
private String dateModified;
|
||||
private boolean isPubliclyVisible = false;
|
||||
private boolean inDOAJ = false;
|
||||
|
||||
public SHERPASystemMetadata() {
|
||||
}
|
||||
|
||||
public int getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(int id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public void setUri(String uri) {
|
||||
this.uri = uri;
|
||||
}
|
||||
|
||||
public String getDateCreated() {
|
||||
return dateCreated;
|
||||
}
|
||||
|
||||
public void setDateCreated(String dateCreated) {
|
||||
this.dateCreated = dateCreated;
|
||||
}
|
||||
|
||||
public String getDateModified() {
|
||||
return dateModified;
|
||||
}
|
||||
|
||||
public void setDateModified(String dateModified) {
|
||||
this.dateModified = dateModified;
|
||||
}
|
||||
|
||||
public boolean isPubliclyVisible() {
|
||||
return isPubliclyVisible;
|
||||
}
|
||||
|
||||
public void setPubliclyVisible(boolean publiclyVisible) {
|
||||
isPubliclyVisible = publiclyVisible;
|
||||
}
|
||||
|
||||
public boolean isInDOAJ() {
|
||||
return inDOAJ;
|
||||
}
|
||||
|
||||
public void setInDOAJ(boolean inDOAJ) {
|
||||
this.inDOAJ = inDOAJ;
|
||||
}
|
||||
}
|
@@ -0,0 +1,38 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.v2;
|
||||
|
||||
/**
|
||||
* SHERPA v2 API query handling utility methods (static). Used by external data providers and SHERPA service.
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public final class SHERPAUtils {
|
||||
|
||||
// Private constructor (since this is a Utility class)
|
||||
private SHERPAUtils() {}
|
||||
|
||||
/**
|
||||
* Sanitise a SHERPA v2 API query by removing special JSON characters that can interfere with parsing at the remote end.
|
||||
* Strip all these characters: "'{};
|
||||
* The URI builder used in the provider and service classes will perform URL encoding. This string
|
||||
* is the raw query submitted to the provider or service.
|
||||
* @param query query string
|
||||
* @return safe query string
|
||||
*/
|
||||
public static String sanitiseQuery(String query) {
|
||||
String safe = query;
|
||||
try {
|
||||
safe = query.replaceAll("['{}\";]", "");
|
||||
} catch (NullPointerException e) {
|
||||
safe = "";
|
||||
}
|
||||
return safe;
|
||||
}
|
||||
|
||||
}
|
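For clarity, a small sketch of what sanitiseQuery does to a raw query before the URI builder URL-encodes it (illustrative only, not part of this commit):

String safe = SHERPAUtils.sanitiseQuery("0266-4666\"; {issn}");
// safe is now "0266-4666 issn" - the quote, semicolon and braces are stripped,
// while ordinary characters (including spaces) are left for URL encoding later
String empty = SHERPAUtils.sanitiseQuery(null);   // a null query yields ""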
@@ -27,6 +27,7 @@ import org.apache.commons.cli.DefaultParser;
|
||||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang3.ArrayUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
@@ -40,6 +41,11 @@ import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.discovery.DiscoverQuery;
|
||||
import org.dspace.discovery.DiscoverResult;
|
||||
import org.dspace.discovery.SearchService;
|
||||
import org.dspace.discovery.SearchServiceException;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
@@ -61,6 +67,7 @@ public class GenerateSitemaps {
|
||||
private static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private static final ConfigurationService configurationService =
|
||||
DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
private static final SearchService searchService = SearchUtils.getSearchService();
|
||||
|
||||
/**
|
||||
* Default constructor
|
||||
@@ -237,14 +244,37 @@ public class GenerateSitemaps {
|
||||
|
||||
while (allItems.hasNext()) {
|
||||
Item i = allItems.next();
|
||||
String url = uiURLStem + "/items/" + i.getID();
|
||||
Date lastMod = i.getLastModified();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, lastMod);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
DiscoverQuery entityQuery = new DiscoverQuery();
|
||||
entityQuery.setQuery("search.uniqueid:\"Item-" + i.getID() + "\" and entityType:*");
|
||||
entityQuery.addSearchField("entityType");
|
||||
|
||||
try {
|
||||
DiscoverResult discoverResult = searchService.search(c, entityQuery);
|
||||
|
||||
String url;
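// If Discovery returned a non-blank entityType for this item, link to the
// /entities/<entity-type>/<uuid> route; otherwise fall back to the plain /items/<uuid> route.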
|
||||
if (CollectionUtils.isNotEmpty(discoverResult.getIndexableObjects())
|
||||
&& CollectionUtils.isNotEmpty(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType"))
|
||||
&& StringUtils.isNotBlank(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0)).get(0).getSearchFieldValues("entityType").get(0))
|
||||
) {
|
||||
url = uiURLStem + "/entities/" + StringUtils.lowerCase(discoverResult.getSearchDocument(
|
||||
discoverResult.getIndexableObjects().get(0))
|
||||
.get(0).getSearchFieldValues("entityType").get(0)) + "/" + i.getID();
|
||||
} else {
|
||||
url = uiURLStem + "/items/" + i.getID();
|
||||
}
|
||||
Date lastMod = i.getLastModified();
|
||||
|
||||
if (makeHTMLMap) {
|
||||
html.addURL(url, lastMod);
|
||||
}
|
||||
if (makeSitemapOrg) {
|
||||
sitemapsOrg.addURL(url, lastMod);
|
||||
}
|
||||
} catch (SearchServiceException e) {
|
||||
log.error("Failed getting entitytype through solr for item " + i.getID() + ": " + e.getMessage());
|
||||
}
|
||||
|
||||
c.uncacheEntity(i);
|
||||
|
@@ -22,7 +22,7 @@ public interface DSpaceWebappMXBean {
|
||||
public boolean isUI();
|
||||
|
||||
/**
|
||||
* What kind of webapp? XMLUI, OAI, etc.
|
||||
* What kind of webapp? Server, etc.
|
||||
*
|
||||
* @return kind of webapp
|
||||
*/
|
||||
|
@@ -141,6 +141,15 @@ public class LDAPAuthentication
|
||||
// Prevents anonymous users from being added to this group, and the second check
|
||||
// ensures they are LDAP users
|
||||
try {
|
||||
// without a logged in user, this method should return an empty list
|
||||
if (context.getCurrentUser() == null) {
|
||||
return Collections.EMPTY_LIST;
|
||||
}
|
||||
// if the logged in user does not have a netid, it's not an LDAP user
|
||||
// and this method should return an empty list
|
||||
if (context.getCurrentUser().getNetid() == null) {
|
||||
return Collections.EMPTY_LIST;
|
||||
}
|
||||
if (!context.getCurrentUser().getNetid().equals("")) {
|
||||
String groupName = configurationService.getProperty("authentication-ldap.login.specialgroup");
|
||||
if ((groupName != null) && (!groupName.trim().equals(""))) {
|
||||
@@ -681,7 +690,7 @@ public class LDAPAuthentication
|
||||
if (StringUtils.isNotBlank(dn)) {
|
||||
System.out.println("dn:" + dn);
|
||||
int i = 1;
|
||||
String groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + i);
|
||||
String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i);
|
||||
|
||||
boolean cmp;
|
||||
|
||||
@@ -721,7 +730,7 @@ public class LDAPAuthentication
|
||||
}
|
||||
}
|
||||
|
||||
groupMap = configurationService.getProperty("authentication-ldap", "login.groupmap." + ++i);
|
||||
groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + ++i);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -305,6 +305,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
|
||||
// metadataValueService.update(context, metadataValue);
|
||||
dso.addDetails(metadataField.toString());
|
||||
}
|
||||
setMetadataModified(dso);
|
||||
return newMetadata;
|
||||
}
|
||||
|
||||
|
@@ -112,6 +112,16 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
|
||||
@Transient
|
||||
private transient ItemService itemService;
|
||||
|
||||
/**
|
||||
* True if any metadata has been modified since the last metadata retrieval
|
||||
* (to drive metadata cache)
|
||||
*/
|
||||
@Transient
|
||||
private boolean modifiedMetadataCache = true;
|
||||
|
||||
@Transient
|
||||
private List<MetadataValue> cachedMetadata = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Protected constructor, create object using:
|
||||
* {@link org.dspace.content.service.ItemService#create(Context, WorkspaceItem)}
|
||||
@@ -373,4 +383,23 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport {
|
||||
}
|
||||
return itemService;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setMetadataModified() {
|
||||
super.setMetadataModified();
|
||||
modifiedMetadataCache = true;
|
||||
}
|
||||
|
||||
public boolean isModifiedMetadataCache() {
|
||||
return modifiedMetadataCache;
|
||||
}
|
||||
|
||||
protected List<MetadataValue> getCachedMetadata() {
|
||||
return cachedMetadata;
|
||||
}
|
||||
|
||||
protected void setCachedMetadata(List<MetadataValue> cachedMetadata) {
|
||||
this.cachedMetadata = cachedMetadata;
|
||||
modifiedMetadataCache = false;
|
||||
}
|
||||
}
|
||||
|
@@ -685,7 +685,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
|
||||
|
||||
// Remove relationships
|
||||
for (Relationship relationship : relationshipService.findByItem(context, item)) {
|
||||
relationshipService.delete(context, relationship, false, false);
|
||||
relationshipService.forceDelete(context, relationship, false, false);
|
||||
}
|
||||
|
||||
// Remove bundles
|
||||
@@ -1328,42 +1328,33 @@ prevent the generation of resource policy entry values with null dspace_object a
|
||||
@Override
|
||||
public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier, String lang,
|
||||
boolean enableVirtualMetadata) {
|
||||
//Fields of the relation schema are virtual metadata
|
||||
//except for relation.type which is the type of item in the model
|
||||
if (StringUtils.equals(schema, MetadataSchemaEnum.RELATION.getName()) && !StringUtils.equals(element, "type")) {
|
||||
|
||||
List<RelationshipMetadataValue> relationMetadata = relationshipMetadataService
|
||||
.getRelationshipMetadata(item, enableVirtualMetadata);
|
||||
List<MetadataValue> listToReturn = new LinkedList<>();
|
||||
for (MetadataValue metadataValue : relationMetadata) {
|
||||
if (StringUtils.equals(metadataValue.getMetadataField().getElement(), element)) {
|
||||
listToReturn.add(metadataValue);
|
||||
}
|
||||
}
|
||||
listToReturn = sortMetadataValueList(listToReturn);
|
||||
|
||||
return listToReturn;
|
||||
|
||||
} else {
|
||||
List<MetadataValue> dbMetadataValues = super.getMetadata(item, schema, element, qualifier, lang);
|
||||
if (!enableVirtualMetadata) {
|
||||
log.debug("Called getMetadata for " + item.getID() + " without enableVirtualMetadata");
|
||||
return super.getMetadata(item, schema, element, qualifier, lang);
|
||||
}
|
||||
if (item.isModifiedMetadataCache()) {
|
||||
log.debug("Called getMetadata for " + item.getID() + " with invalid cache");
|
||||
//rebuild cache
|
||||
List<MetadataValue> dbMetadataValues = item.getMetadata();
|
||||
|
||||
List<MetadataValue> fullMetadataValueList = new LinkedList<>();
|
||||
if (enableVirtualMetadata) {
|
||||
fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true));
|
||||
|
||||
}
|
||||
fullMetadataValueList.addAll(relationshipMetadataService.getRelationshipMetadata(item, true));
|
||||
fullMetadataValueList.addAll(dbMetadataValues);
|
||||
|
||||
List<MetadataValue> finalList = new LinkedList<>();
|
||||
for (MetadataValue metadataValue : fullMetadataValueList) {
|
||||
if (match(schema, element, qualifier, lang, metadataValue)) {
|
||||
finalList.add(metadataValue);
|
||||
}
|
||||
}
|
||||
finalList = sortMetadataValueList(finalList);
|
||||
return finalList;
|
||||
item.setCachedMetadata(sortMetadataValueList(fullMetadataValueList));
|
||||
}
|
||||
|
||||
log.debug("Called getMetadata for " + item.getID() + " based on cache");
|
||||
// Build up list of matching values based on the cache
|
||||
List<MetadataValue> values = new ArrayList<>();
|
||||
for (MetadataValue dcv : item.getCachedMetadata()) {
|
||||
if (match(schema, element, qualifier, lang, dcv)) {
|
||||
values.add(dcv);
|
||||
}
|
||||
}
|
||||
|
||||
// Create an array of matching values
|
||||
return values;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -164,6 +164,7 @@ public class Relationship implements ReloadableEntity<Integer> {
|
||||
*/
|
||||
public void setLeftPlace(int leftPlace) {
|
||||
this.leftPlace = leftPlace;
|
||||
leftItem.setMetadataModified();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -180,6 +181,7 @@ public class Relationship implements ReloadableEntity<Integer> {
|
||||
*/
|
||||
public void setRightPlace(int rightPlace) {
|
||||
this.rightPlace = rightPlace;
|
||||
rightItem.setMetadataModified();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -347,22 +347,44 @@ public class RelationshipServiceImpl implements RelationshipService {
|
||||
if (isRelationshipValidToDelete(context, relationship) &&
|
||||
copyToItemPermissionCheck(context, relationship, copyToLeftItem, copyToRightItem)) {
|
||||
// To delete a relationship, a user must have WRITE permissions on one of the related Items
|
||||
copyMetadataValues(context, relationship, copyToLeftItem, copyToRightItem);
|
||||
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
|
||||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
|
||||
relationshipDAO.delete(context, relationship);
|
||||
updatePlaceInRelationship(context, relationship);
|
||||
updateItemsInRelationship(context, relationship);
|
||||
} else {
|
||||
throw new AuthorizeException(
|
||||
"You do not have write rights on this relationship's items");
|
||||
}
|
||||
deleteRelationshipAndCopyToItem(context, relationship, copyToLeftItem, copyToRightItem);
|
||||
|
||||
} else {
|
||||
throw new IllegalArgumentException("The relationship given was not valid");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forceDelete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem)
|
||||
throws SQLException, AuthorizeException {
|
||||
log.info(org.dspace.core.LogManager.getHeader(context, "delete_relationship",
|
||||
"relationship_id=" + relationship.getID() + "&" +
|
||||
"copyMetadataValuesToLeftItem=" + copyToLeftItem + "&" +
|
||||
"copyMetadataValuesToRightItem=" + copyToRightItem));
|
||||
if (copyToItemPermissionCheck(context, relationship, copyToLeftItem, copyToRightItem)) {
|
||||
// To delete a relationship, a user must have WRITE permissions on one of the related Items
|
||||
deleteRelationshipAndCopyToItem(context, relationship, copyToLeftItem, copyToRightItem);
|
||||
|
||||
} else {
|
||||
throw new IllegalArgumentException("The relationship given was not valid");
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteRelationshipAndCopyToItem(Context context, Relationship relationship, boolean copyToLeftItem,
|
||||
boolean copyToRightItem) throws SQLException, AuthorizeException {
|
||||
copyMetadataValues(context, relationship, copyToLeftItem, copyToRightItem);
|
||||
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
|
||||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
|
||||
relationshipDAO.delete(context, relationship);
|
||||
updatePlaceInRelationship(context, relationship);
|
||||
updateItemsInRelationship(context, relationship);
|
||||
} else {
|
||||
throw new AuthorizeException(
|
||||
"You do not have write rights on this relationship's items");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Utility method to ensure discovery is updated for the 2 items
|
||||
|
@@ -13,6 +13,7 @@ import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import javax.xml.xpath.XPath;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
@@ -315,6 +316,9 @@ public class DSpaceControlledVocabulary extends SelfNamedPlugin implements Hiera
|
||||
private String getNodeLabel(String key, boolean useHierarchy) {
|
||||
try {
|
||||
Node node = getNode(key);
|
||||
if (Objects.isNull(node)) {
|
||||
return null;
|
||||
}
|
||||
if (useHierarchy) {
|
||||
return this.buildString(node);
|
||||
} else {
|
||||
|
@@ -222,4 +222,12 @@ public class MetadataAuthorityServiceImpl implements MetadataAuthorityService {
|
||||
}
|
||||
return copy;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clearCache() {
|
||||
controlled.clear();
|
||||
minConfidence.clear();
|
||||
|
||||
isAuthorityRequired = null;
|
||||
}
|
||||
}
|
||||
|
@@ -112,4 +112,9 @@ public interface MetadataAuthorityService {
|
||||
* @return the list of metadata field with authority control
|
||||
*/
|
||||
public List<String> getAuthorityMetadata();
|
||||
|
||||
/**
|
||||
* Clears the cache kept inside the service.
|
||||
*/
|
||||
public void clearCache();
|
||||
}
|
||||
|
@@ -8,8 +8,10 @@
|
||||
package org.dspace.content.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
@@ -17,6 +19,7 @@ import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataField_;
|
||||
import org.dspace.content.MetadataSchema;
|
||||
@@ -24,6 +27,7 @@ import org.dspace.content.MetadataSchema_;
|
||||
import org.dspace.content.dao.MetadataFieldDAO;
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
import org.hibernate.Session;
|
||||
|
||||
/**
|
||||
* Hibernate implementation of the Database Access Object interface class for the MetadataField object.
|
||||
@@ -33,6 +37,17 @@ import org.dspace.core.Context;
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> implements MetadataFieldDAO {
|
||||
/**
|
||||
* log4j logger
|
||||
*/
|
||||
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataFieldDAOImpl.class);
|
||||
|
||||
/**
|
||||
* Cache to improve the performance of metadata field lookups.
|
||||
* This cache only stores IDs, the actual MetadataField is retrieved from hibernate
|
||||
*/
|
||||
private static Map<String, Integer> cachedFields = new HashMap<>();
|
||||
|
||||
protected MetadataFieldDAOImpl() {
|
||||
super();
|
||||
}
|
||||
@@ -79,6 +94,30 @@ public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> im
|
||||
@Override
|
||||
public MetadataField findByElement(Context context, String metadataSchema, String element, String qualifier)
|
||||
throws SQLException {
|
||||
String key = metadataSchema + "." + element + "." + qualifier;
|
||||
if (cachedFields.containsKey(key)) {
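// Cache hit: load the field by its cached ID, then verify the loaded field still matches the
// requested schema.element.qualifier key; if it does not, drop the stale entry (or clear the
// whole cache on error) and fall through to the regular query below.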
|
||||
Session session = getHibernateSession(context);
|
||||
MetadataField metadataField = null;
|
||||
try {
|
||||
metadataField = session.load(MetadataField.class, cachedFields.get(key));
|
||||
} catch (Throwable e) {
|
||||
log.error("Failed to load metadata field " + key + " using ID " + cachedFields.get(key));
|
||||
}
|
||||
try {
|
||||
if (metadataField != null &&
|
||||
(metadataField.getMetadataSchema().getName() + "." + metadataField.getElement() +
|
||||
"." + metadataField.getQualifier()).equals(key)) {
|
||||
return metadataField;
|
||||
} else {
|
||||
cachedFields.remove(key);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
log.error("Failed to verify consistence of metadata field " + key +
|
||||
" using ID " + cachedFields.get(key));
|
||||
cachedFields.clear();
|
||||
}
|
||||
}
|
||||
|
||||
Query query;
|
||||
|
||||
if (StringUtils.isNotBlank(qualifier)) {
|
||||
@@ -103,7 +142,11 @@ public class MetadataFieldDAOImpl extends AbstractHibernateDAO<MetadataField> im
|
||||
}
|
||||
query.setHint("org.hibernate.cacheable", Boolean.TRUE);
|
||||
|
||||
return singleResult(query);
|
||||
MetadataField metadataField = singleResult(query);
|
||||
if (metadataField != null) {
|
||||
cachedFields.put(key, metadataField.getID());
|
||||
}
|
||||
return metadataField;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -317,4 +317,17 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
|
||||
*/
|
||||
void delete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem)
|
||||
throws SQLException, AuthorizeException;
|
||||
/**
|
||||
* This method is used to delete a Relationship whilst given the possibility to copy the Virtual Metadata created
|
||||
* by this relationship to the left and/or right item.
|
||||
* This method will bypass the cardinality checks on the {@link RelationshipType} for the given {@link Relationship}
|
||||
* This should only be used during the deletion of items so that the min cardinality check can't disallow items
|
||||
* to be deleted
|
||||
* @param context The relevant DSpace context
|
||||
* @param relationship The relationship to be deleted
|
||||
* @param copyToLeftItem A boolean indicating whether we should copy metadata to the left item or not
|
||||
* @param copyToRightItem A boolean indicating whether we should copy metadata to the right item or not
|
||||
*/
|
||||
void forceDelete(Context context, Relationship relationship, boolean copyToLeftItem, boolean copyToRightItem)
|
||||
throws SQLException, AuthorizeException;
|
||||
}
|
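The difference between the two delete methods can be summarised in a short sketch (illustrative only; "context" and "relationship" are assumed to be in scope):

// Ordinary deletion: the RelationshipType cardinality rules are validated first
relationshipService.delete(context, relationship, false, false);
// During item deletion (see the ItemServiceImpl change above): the minimum cardinality
// check is bypassed so the relationship can always be removed
relationshipService.forceDelete(context, relationship, false, false);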
@@ -29,7 +29,6 @@ import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
@@ -160,13 +159,9 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
                }
                curator.curate(context, entry.getObjectId());
            } else {
                // make eperson who queued task the effective user
                EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
                if (agent != null) {
                    context.setCurrentUser(agent);
                }
                CurateServiceFactory.getInstance().getWorkflowCuratorService()
                                    .curate(curator, context, entry.getObjectId());
                // TODO: Remove this exception once curation tasks are supported by configurable workflow
                // e.g. see https://github.com/DSpace/DSpace/pull/3157
                throw new IllegalArgumentException("curation for workflow items is no longer supported");
            }
        }
        queue.release(this.queue, ticket, true);
@@ -1,422 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate;
|
||||
|
||||
import static javax.xml.stream.XMLStreamConstants.CHARACTERS;
|
||||
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
|
||||
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.xml.stream.XMLInputFactory;
|
||||
import javax.xml.stream.XMLStreamException;
|
||||
import javax.xml.stream.XMLStreamReader;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.core.factory.CoreServiceFactory;
|
||||
import org.dspace.core.service.PluginService;
|
||||
import org.dspace.curate.service.WorkflowCuratorService;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.BasicWorkflowServiceImpl;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
// Warning - static import ahead!
|
||||
|
||||
|
||||
/**
|
||||
* WorkflowCurator manages interactions between curation and workflow.
|
||||
* Specifically, it is invoked in WorkflowManager to allow the
|
||||
* performance of curation tasks during workflow.
|
||||
*
|
||||
* @author richardrodgers
|
||||
*/
|
||||
public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
|
||||
|
||||
/**
|
||||
* Logging category
|
||||
*/
|
||||
private static final Logger log
|
||||
= org.apache.logging.log4j.LogManager.getLogger();
|
||||
|
||||
protected Map<String, TaskSet> tsMap = new HashMap<String, TaskSet>();
|
||||
|
||||
protected final String[] flowSteps = {"step1", "step2", "step3", "archive"};
|
||||
|
||||
@Autowired(required = true)
|
||||
protected CollectionService collectionService;
|
||||
@Autowired(required = true)
|
||||
protected EPersonService ePersonService;
|
||||
@Autowired(required = true)
|
||||
protected GroupService groupService;
|
||||
protected BasicWorkflowItemService basicWorkflowItemService;
|
||||
protected BasicWorkflowService basicWorkflowService;
|
||||
@Autowired(required = true)
|
||||
protected WorkflowServiceFactory workflowServiceFactory;
|
||||
@Autowired(required = true)
|
||||
protected ConfigurationService configurationService;
|
||||
|
||||
/**
|
||||
* Initialize the bean (after dependency injection has already taken place).
|
||||
* Ensures the configurationService is injected, so that we can read the
|
||||
* settings from configuration
|
||||
* Called by "init-method" in Spring config.
|
||||
*
|
||||
* @throws Exception ...
|
||||
*/
|
||||
public void init() throws Exception {
|
||||
File cfgFile = new File(configurationService.getProperty("dspace.dir") +
|
||||
File.separator + "config" + File.separator +
|
||||
"workflow-curation.xml");
|
||||
try {
|
||||
loadTaskConfig(cfgFile);
|
||||
if (workflowServiceFactory.getWorkflowService() instanceof BasicWorkflowItemService) {
|
||||
basicWorkflowService = (BasicWorkflowService) workflowServiceFactory.getWorkflowService();
|
||||
basicWorkflowItemService = (BasicWorkflowItemService) workflowServiceFactory.getWorkflowItemService();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// debug e.printStackTrace();
|
||||
log.fatal("Unable to load config: " + cfgFile.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
protected WorkflowCuratorServiceImpl() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean needsCuration(BasicWorkflowItem wfi) {
|
||||
return getFlowStep(wfi) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doCuration(Context c, BasicWorkflowItem wfi)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
FlowStep step = getFlowStep(wfi);
|
||||
if (step != null) {
|
||||
Curator curator = new Curator();
|
||||
// are we going to perform, or just put on queue?
|
||||
if (step.queue != null) {
|
||||
// The queue runner will call setReporter
|
||||
for (Task task : step.tasks) {
|
||||
curator.addTask(task.name);
|
||||
}
|
||||
curator.queue(c, String.valueOf(wfi.getID()), step.queue);
|
||||
basicWorkflowItemService.update(c, wfi);
|
||||
return false;
|
||||
} else {
|
||||
PluginService plugins = CoreServiceFactory.getInstance()
|
||||
.getPluginService();
|
||||
try (Reporter reporter
|
||||
= (Reporter) plugins
|
||||
.getSinglePlugin(Reporter.class);) {
|
||||
curator.setReporter(reporter);
|
||||
boolean status = curate(curator, c, wfi);
|
||||
reporter.close();
|
||||
return status;
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to close report", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean curate(Curator curator, Context c, String wfId)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
BasicWorkflowItem wfi = basicWorkflowItemService.find(c, Integer.parseInt(wfId));
|
||||
if (wfi != null) {
|
||||
if (curate(curator, c, wfi)) {
|
||||
basicWorkflowService.advance(c, wfi, c.getCurrentUser(), false, true);
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
log.warn(LogManager.getHeader(c, "No workflow item found for id: " + wfId, null));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean curate(Curator curator, Context c, BasicWorkflowItem wfi)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
FlowStep step = getFlowStep(wfi);
|
||||
if (step != null) {
|
||||
// assign collection to item in case task needs it
|
||||
Item item = wfi.getItem();
|
||||
item.setOwningCollection(wfi.getCollection());
|
||||
for (Task task : step.tasks) {
|
||||
curator.addTask(task.name);
|
||||
curator.curate(item);
|
||||
int status = curator.getStatus(task.name);
|
||||
String result = curator.getResult(task.name);
|
||||
String action = "none";
|
||||
if (status == Curator.CURATE_FAIL) {
|
||||
// task failed - notify any contacts the task has assigned
|
||||
if (task.powers.contains("reject")) {
|
||||
action = "reject";
|
||||
}
|
||||
notifyContacts(c, wfi, task, "fail", action, result);
|
||||
// if task so empowered, reject submission and terminate
|
||||
if ("reject".equals(action)) {
|
||||
basicWorkflowService.sendWorkflowItemBackSubmission(c, wfi, c.getCurrentUser(),
|
||||
null, task.name + ": " + result);
|
||||
return false;
|
||||
}
|
||||
} else if (status == Curator.CURATE_SUCCESS) {
|
||||
if (task.powers.contains("approve")) {
|
||||
action = "approve";
|
||||
}
|
||||
notifyContacts(c, wfi, task, "success", action, result);
|
||||
if ("approve".equals(action)) {
|
||||
// cease further task processing and advance submission
|
||||
return true;
|
||||
}
|
||||
} else if (status == Curator.CURATE_ERROR) {
|
||||
notifyContacts(c, wfi, task, "error", action, result);
|
||||
}
|
||||
curator.clear();
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
protected void notifyContacts(Context c, BasicWorkflowItem wfi, Task task,
|
||||
String status, String action, String message)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
List<EPerson> epa = resolveContacts(c, task.getContacts(status), wfi);
|
||||
if (epa.size() > 0) {
|
||||
basicWorkflowService.notifyOfCuration(c, wfi, epa, task.name, action, message);
|
||||
}
|
||||
}
|
||||
|
||||
protected List<EPerson> resolveContacts(Context c, List<String> contacts,
|
||||
BasicWorkflowItem wfi)
|
||||
throws AuthorizeException, IOException, SQLException {
|
||||
List<EPerson> epList = new ArrayList<EPerson>();
|
||||
for (String contact : contacts) {
|
||||
// decode contacts
|
||||
if ("$flowgroup".equals(contact)) {
|
||||
// special literal for current flowgroup
|
||||
int step = state2step(wfi.getState());
|
||||
// make sure this step exists
|
||||
if (step < 4) {
|
||||
Group wfGroup = collectionService.getWorkflowGroup(c, wfi.getCollection(), step);
|
||||
if (wfGroup != null) {
|
||||
epList.addAll(groupService.allMembers(c, wfGroup));
|
||||
}
|
||||
}
|
||||
} else if ("$colladmin".equals(contact)) {
|
||||
Group adGroup = wfi.getCollection().getAdministrators();
|
||||
if (adGroup != null) {
|
||||
epList.addAll(groupService.allMembers(c, adGroup));
|
||||
}
|
||||
} else if ("$siteadmin".equals(contact)) {
|
||||
EPerson siteEp = ePersonService.findByEmail(c,
|
||||
configurationService.getProperty("mail.admin"));
|
||||
if (siteEp != null) {
|
||||
epList.add(siteEp);
|
||||
}
|
||||
} else if (contact.indexOf("@") > 0) {
|
||||
// little shaky heuristic here - assume an eperson email name
|
||||
EPerson ep = ePersonService.findByEmail(c, contact);
|
||||
if (ep != null) {
|
||||
epList.add(ep);
|
||||
}
|
||||
} else {
|
||||
// assume it is an arbitrary group name
|
||||
Group group = groupService.findByName(c, contact);
|
||||
if (group != null) {
|
||||
epList.addAll(groupService.allMembers(c, group));
|
||||
}
|
||||
}
|
||||
}
|
||||
return epList;
|
||||
}
|
||||
|
||||
protected FlowStep getFlowStep(BasicWorkflowItem wfi) {
|
||||
Collection coll = wfi.getCollection();
|
||||
String key = tsMap.containsKey(coll.getHandle()) ? coll.getHandle() : "default";
|
||||
TaskSet ts = tsMap.get(key);
|
||||
if (ts != null) {
|
||||
int myStep = state2step(wfi.getState());
|
||||
for (FlowStep fstep : ts.steps) {
|
||||
if (fstep.step == myStep) {
|
||||
return fstep;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
protected int state2step(int state) {
|
||||
if (state <= BasicWorkflowServiceImpl.WFSTATE_STEP1POOL) {
|
||||
return 1;
|
||||
}
|
||||
if (state <= BasicWorkflowServiceImpl.WFSTATE_STEP2POOL) {
|
||||
return 2;
|
||||
}
|
||||
if (state <= BasicWorkflowServiceImpl.WFSTATE_STEP3POOL) {
|
||||
return 3;
|
||||
}
|
||||
return 4;
|
||||
}
|
||||
|
||||
protected int stepName2step(String name) {
|
||||
for (int i = 0; i < flowSteps.length; i++) {
|
||||
if (flowSteps[i].equals(name)) {
|
||||
return i + 1;
|
||||
}
|
||||
}
|
||||
// invalid stepName - log
|
||||
log.warn("Invalid step: '" + name + "' provided");
|
||||
return -1;
|
||||
}
|
||||
|
||||
protected void loadTaskConfig(File cfgFile) throws IOException {
|
||||
Map<String, String> collMap = new HashMap<String, String>();
|
||||
Map<String, TaskSet> setMap = new HashMap<String, TaskSet>();
|
||||
TaskSet taskSet = null;
|
||||
FlowStep flowStep = null;
|
||||
Task task = null;
|
||||
String type = null;
|
||||
try {
|
||||
XMLInputFactory factory = XMLInputFactory.newInstance();
|
||||
XMLStreamReader reader = factory.createXMLStreamReader(
|
||||
new FileInputStream(cfgFile), "UTF-8");
|
||||
while (reader.hasNext()) {
|
||||
int event = reader.next();
|
||||
if (event == START_ELEMENT) {
|
||||
String eName = reader.getLocalName();
|
||||
if ("mapping".equals(eName)) {
|
||||
collMap.put(reader.getAttributeValue(0),
|
||||
reader.getAttributeValue(1));
|
||||
} else if ("taskset".equals(eName)) {
|
||||
taskSet = new TaskSet(reader.getAttributeValue(0));
|
||||
} else if ("flowstep".equals(eName)) {
|
||||
int count = reader.getAttributeCount();
|
||||
String queue = (count == 2) ?
|
||||
reader.getAttributeValue(1) : null;
|
||||
flowStep = new FlowStep(reader.getAttributeValue(0), queue);
|
||||
} else if ("task".equals(eName)) {
|
||||
task = new Task(reader.getAttributeValue(0));
|
||||
} else if ("workflow".equals(eName)) {
|
||||
type = "power";
|
||||
} else if ("notify".equals(eName)) {
|
||||
type = reader.getAttributeValue(0);
|
||||
}
|
||||
} else if (event == CHARACTERS) {
|
||||
if (task != null) {
|
||||
if ("power".equals(type)) {
|
||||
task.addPower(reader.getText());
|
||||
} else {
|
||||
task.addContact(type, reader.getText());
|
||||
}
|
||||
}
|
||||
} else if (event == END_ELEMENT) {
|
||||
String eName = reader.getLocalName();
|
||||
if ("task".equals(eName)) {
|
||||
flowStep.addTask(task);
|
||||
task = null;
|
||||
} else if ("flowstep".equals(eName)) {
|
||||
taskSet.addStep(flowStep);
|
||||
} else if ("taskset".equals(eName)) {
|
||||
setMap.put(taskSet.setName, taskSet);
|
||||
}
|
||||
}
|
||||
}
|
||||
reader.close();
|
||||
// stitch maps together
|
||||
for (Map.Entry<String, String> collEntry : collMap.entrySet()) {
|
||||
if (!"none".equals(collEntry.getValue()) && setMap.containsKey(collEntry.getValue())) {
|
||||
tsMap.put(collEntry.getKey(), setMap.get(collEntry.getValue()));
|
||||
}
|
||||
}
|
||||
} catch (XMLStreamException xsE) {
|
||||
throw new IOException(xsE.getMessage(), xsE);
|
||||
}
|
||||
}
|
||||
|
||||
protected class TaskSet {
|
||||
public String setName = null;
|
||||
public List<FlowStep> steps = null;
|
||||
|
||||
public TaskSet(String setName) {
|
||||
this.setName = setName;
|
||||
steps = new ArrayList<FlowStep>();
|
||||
}
|
||||
|
||||
public void addStep(FlowStep step) {
|
||||
steps.add(step);
|
||||
}
|
||||
}
|
||||
|
||||
protected class FlowStep {
|
||||
public int step = -1;
|
||||
public String queue = null;
|
||||
public List<Task> tasks = null;
|
||||
|
||||
public FlowStep(String stepStr, String queueStr) {
|
||||
this.step = stepName2step(stepStr);
|
||||
this.queue = queueStr;
|
||||
tasks = new ArrayList<Task>();
|
||||
}
|
||||
|
||||
public void addTask(Task task) {
|
||||
tasks.add(task);
|
||||
}
|
||||
}
|
||||
|
||||
protected class Task {
|
||||
public String name = null;
|
||||
public List<String> powers = new ArrayList<String>();
|
||||
public Map<String, List<String>> contacts = new HashMap<String, List<String>>();
|
||||
|
||||
public Task(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public void addPower(String power) {
|
||||
powers.add(power);
|
||||
}
|
||||
|
||||
public void addContact(String status, String contact) {
|
||||
List<String> sContacts = contacts.get(status);
|
||||
if (sContacts == null) {
|
||||
sContacts = new ArrayList<String>();
|
||||
contacts.put(status, sContacts);
|
||||
}
|
||||
sContacts.add(contact);
|
||||
}
|
||||
|
||||
public List<String> getContacts(String status) {
|
||||
List<String> ret = contacts.get(status);
|
||||
return (ret != null) ? ret : new ArrayList<String>();
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,27 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate.factory;
|
||||
|
||||
import org.dspace.curate.service.WorkflowCuratorService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
|
||||
/**
|
||||
* Abstract factory to get services for the curate package, use CurateServiceFactory.getInstance() to retrieve an
|
||||
* implementation
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public abstract class CurateServiceFactory {
|
||||
|
||||
public abstract WorkflowCuratorService getWorkflowCuratorService();
|
||||
|
||||
public static CurateServiceFactory getInstance() {
|
||||
return DSpaceServicesFactory.getInstance().getServiceManager()
|
||||
.getServiceByName("curateServiceFactory", CurateServiceFactory.class);
|
||||
}
|
||||
}
|
@@ -1,28 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate.factory;
|
||||
|
||||
import org.dspace.curate.service.WorkflowCuratorService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Factory implementation to get services for the curate package, use CurateServiceFactory.getInstance() to retrieve
|
||||
* an implementation
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class CurateServiceFactoryImpl extends CurateServiceFactory {
|
||||
|
||||
@Autowired(required = true)
|
||||
private WorkflowCuratorService workflowCurator;
|
||||
|
||||
@Override
|
||||
public WorkflowCuratorService getWorkflowCuratorService() {
|
||||
return workflowCurator;
|
||||
}
|
||||
}
|
@@ -1,76 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.curate.Curator;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
|
||||
/**
|
||||
* WorkflowCurator manages interactions between curation and workflow.
|
||||
* Specifically, it is invoked in WorkflowManager to allow the
|
||||
* performance of curation tasks during workflow.
|
||||
*
|
||||
* @author richardrodgers
|
||||
*/
|
||||
public interface WorkflowCuratorService {
|
||||
|
||||
|
||||
public boolean needsCuration(BasicWorkflowItem wfi);
|
||||
|
||||
/**
|
||||
* Determines and executes curation on a Workflow item.
|
||||
*
|
||||
* @param c the context
|
||||
* @param wfi the workflow item
|
||||
* @return true if curation was completed or not required,
|
||||
* false if tasks were queued for later completion,
|
||||
* or item was rejected
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean doCuration(Context c, BasicWorkflowItem wfi)
|
||||
throws AuthorizeException, IOException, SQLException;
|
||||
|
||||
|
||||
/**
|
||||
* Determines and executes curation of a Workflow item.
|
||||
*
|
||||
* @param curator the Curator object
|
||||
* @param c the user context
|
||||
* @param wfId the workflow id
|
||||
* @return true if curation was completed or not required,
|
||||
* false if no workflow item found for id
|
||||
* or item was rejected
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean curate(Curator curator, Context c, String wfId)
|
||||
throws AuthorizeException, IOException, SQLException;
|
||||
|
||||
/**
|
||||
* Determines and executes curation of a Workflow item.
|
||||
*
|
||||
* @param curator the Curator object
|
||||
* @param c the user context
|
||||
* @param wfi the workflow item
|
||||
* @return true if curation was completed or not required,
|
||||
* false if item was rejected
|
||||
* @throws AuthorizeException if authorization error
|
||||
* @throws IOException if IO error
|
||||
* @throws SQLException if database error
|
||||
*/
|
||||
public boolean curate(Curator curator, Context c, BasicWorkflowItem wfi)
|
||||
throws AuthorizeException, IOException, SQLException;
|
||||
}
|
@@ -138,8 +138,9 @@ public class IndexEventConsumer implements Consumer {
|
||||
// became directly an item without giving us the chance to retrieve a
|
||||
// workflowitem... so we need to force the unindex of all the related data
|
||||
// before indexing it again, to be sure we don't leave any zombies in Solr
|
||||
String detail =
|
||||
Constants.typeText[event.getSubjectType()] + "-" + event.getSubjectID().toString();
|
||||
IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance()
|
||||
.getIndexFactoryByType(Constants.typeText[event.getSubjectType()]);
|
||||
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
|
||||
uniqueIdsToDelete.add(detail);
|
||||
}
|
||||
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
|
||||
@@ -156,6 +157,13 @@ public class IndexEventConsumer implements Consumer {
|
||||
} else {
|
||||
log.debug("consume() adding event to update queue: " + event.toString());
|
||||
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
|
||||
|
||||
// If the event subject is a Collection and the event object is an Item,
|
||||
// also update the object in order to index mapped/unmapped Items
|
||||
if (subject != null &&
|
||||
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
|
||||
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
@@ -163,7 +171,9 @@ public class IndexEventConsumer implements Consumer {
|
||||
if (event.getSubjectType() == -1 || event.getSubjectID() == null) {
|
||||
log.warn("got null subject type and/or ID on DELETE event, skipping it.");
|
||||
} else {
|
||||
String detail = Constants.typeText[event.getSubjectType()] + "-" + event.getSubjectID().toString();
|
||||
IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance()
|
||||
.getIndexFactoryByType(Constants.typeText[event.getSubjectType()]);
|
||||
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
|
||||
log.debug("consume() adding event to delete queue: " + event.toString());
|
||||
uniqueIdsToDelete.add(detail);
|
||||
}
|
||||
|
@@ -39,10 +39,29 @@ public class SolrSearchCore {
     */
    protected SolrClient solr = null;

    /**
     * Default HTTP method to use for all Solr requests (we prefer POST).
     * This REQUEST_METHOD should be used in all Solr queries, e.g.
     * solrSearchCore.getSolr().query(myQuery, solrSearchCore.REQUEST_METHOD);
     */
    public SolrRequest.METHOD REQUEST_METHOD = SolrRequest.METHOD.POST;

    /**
     * Get access to the current SolrClient. If no current SolrClient exists, a new one is initialized, see initSolr().
     * @return SolrClient Solr client
     */
    public SolrClient getSolr() {
        if (solr == null) {
            initSolr();
        }

        // If we are running Integration Tests using the EmbeddedSolrServer, we MUST override our default HTTP request
        // method to use GET instead of POST (the latter is what we prefer). Unfortunately, EmbeddedSolrServer does not
        // currently work well with POST requests (see https://issues.apache.org/jira/browse/SOLR-12858). When that bug is
        // fixed, we should remove this 'if' statement so that tests also use POST.
        if (solr.getClass().getSimpleName().equals("EmbeddedSolrServer")) {
            REQUEST_METHOD = SolrRequest.METHOD.GET;
        }
        return solr;
    }

@@ -69,7 +88,7 @@ public class SolrSearchCore {
                " AND " + SearchUtils.RESOURCE_ID_FIELD + ":1");
            // Only return obj identifier fields in result doc
            solrQuery.setFields(SearchUtils.RESOURCE_TYPE_FIELD, SearchUtils.RESOURCE_ID_FIELD);
            solrServer.query(solrQuery, SolrRequest.METHOD.POST);
            solrServer.query(solrQuery, REQUEST_METHOD);

            // As long as Solr is initialized, check with DatabaseUtils to see
            // if a reindex is in order. If so, reindex everything
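For illustration, a minimal sketch of the calling convention the new REQUEST_METHOD field is meant to establish; the org.dspace.discovery package for SolrSearchCore and the countAll helper are assumptions made for this example.

import java.io.IOException;

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.dspace.discovery.SolrSearchCore;

public class SolrRequestMethodExample {
    /**
     * Count all documents in the search core, passing REQUEST_METHOD instead of a
     * hard-coded SolrRequest.METHOD.POST so that EmbeddedSolrServer-based tests
     * transparently fall back to GET.
     */
    public static long countAll(SolrSearchCore solrSearchCore) throws SolrServerException, IOException {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0); // we only need numFound, not the documents themselves
        QueryResponse response = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD);
        return response.getResults().getNumFound();
    }
}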
@@ -36,7 +36,6 @@ import org.apache.commons.collections4.Transformer;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrRequest;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.response.FacetField;
|
||||
import org.apache.solr.client.solrj.response.QueryResponse;
|
||||
@@ -362,7 +361,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
SolrQuery countQuery = new SolrQuery("*:*");
|
||||
countQuery.setRows(0); // don't actually request any data
|
||||
// Get the total amount of results
|
||||
QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery, SolrRequest.METHOD.POST);
|
||||
QueryResponse totalResponse = solrSearchCore.getSolr().query(countQuery,
|
||||
solrSearchCore.REQUEST_METHOD);
|
||||
long total = totalResponse.getResults().getNumFound();
|
||||
|
||||
int start = 0;
|
||||
@@ -378,7 +378,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
// Keep looping until we hit the total number of Solr docs
|
||||
while (start < total) {
|
||||
query.setStart(start);
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(query, SolrRequest.METHOD.POST);
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD);
|
||||
SolrDocumentList docs = rsp.getResults();
|
||||
|
||||
for (SolrDocument doc : docs) {
|
||||
@@ -439,7 +439,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
SolrQuery solrQuery = new SolrQuery();
|
||||
solrQuery.set("spellcheck", true);
|
||||
solrQuery.set(SpellingParams.SPELLCHECK_BUILD, true);
|
||||
solrSearchCore.getSolr().query(solrQuery, SolrRequest.METHOD.POST);
|
||||
solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
|
||||
} catch (SolrServerException e) {
|
||||
//Make sure to also log the exception since this command is usually run from a crontab.
|
||||
log.error(e, e);
|
||||
@@ -519,7 +519,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
if (solrSearchCore.getSolr() == null) {
|
||||
return false;
|
||||
}
|
||||
rsp = solrSearchCore.getSolr().query(query, SolrRequest.METHOD.POST);
|
||||
rsp = solrSearchCore.getSolr().query(query, solrSearchCore.REQUEST_METHOD);
|
||||
} catch (SolrServerException e) {
|
||||
throw new SearchServiceException(e.getMessage(), e);
|
||||
}
|
||||
@@ -721,7 +721,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
SolrQuery solrQuery = resolveToSolrQuery(context, discoveryQuery);
|
||||
|
||||
|
||||
QueryResponse queryResponse = solrSearchCore.getSolr().query(solrQuery, SolrRequest.METHOD.POST);
|
||||
QueryResponse queryResponse = solrSearchCore.getSolr().query(solrQuery,
|
||||
solrSearchCore.REQUEST_METHOD);
|
||||
return retrieveResult(context, discoveryQuery, queryResponse);
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -1048,7 +1049,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
if (filterquery != null) {
|
||||
solrQuery.addFilterQuery(filterquery);
|
||||
}
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(solrQuery, SolrRequest.METHOD.POST);
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
|
||||
SolrDocumentList docs = rsp.getResults();
|
||||
|
||||
Iterator iter = docs.iterator();
|
||||
@@ -1152,7 +1153,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
|
||||
if (solrSearchCore.getSolr() == null) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(solrQuery, SolrRequest.METHOD.POST);
|
||||
QueryResponse rsp = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
|
||||
NamedList mltResults = (NamedList) rsp.getResponse().get("moreLikeThis");
|
||||
if (mltResults != null && mltResults.get(item.getType() + "-" + item.getID()) != null) {
|
||||
SolrDocumentList relatedDocs = (SolrDocumentList) mltResults.get(item.getType() + "-" + item.getID());
|
||||
|
@@ -8,19 +8,22 @@
|
||||
package org.dspace.discovery.indexobject;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.collections4.ListUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.solr.client.solrj.SolrClient;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
|
||||
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.apache.solr.common.params.ModifiableSolrParams;
|
||||
import org.apache.solr.handler.extraction.ExtractingParams;
|
||||
import org.apache.tika.exception.TikaException;
|
||||
import org.apache.tika.metadata.Metadata;
|
||||
import org.apache.tika.parser.ParseContext;
|
||||
import org.apache.tika.parser.csv.TextAndCSVParser;
|
||||
import org.apache.tika.sax.BodyContentHandler;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.FullTextContentStreams;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
@@ -31,6 +34,7 @@ import org.dspace.discovery.indexobject.factory.IndexFactory;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.util.SolrUtils;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
/**
|
||||
* Basis factory interface implementation for indexing/retrieving any IndexableObject in the search core
|
||||
@@ -38,6 +42,8 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
*/
|
||||
public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements IndexFactory<T, S> {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(IndexFactoryImpl.class);
|
||||
|
||||
@Autowired
|
||||
protected List<SolrServiceIndexPlugin> solrServiceIndexPlugins;
|
||||
@Autowired
|
||||
@@ -74,35 +80,60 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
|
||||
* Write the document to the index under the appropriate unique identifier.
|
||||
*
|
||||
* @param doc the solr document to be written to the server
|
||||
* @param streams list of bitstream content streams DiscoverQueryBuilderTest.java:285
|
||||
* @param streams list of bitstream content streams
|
||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
||||
*/
|
||||
protected void writeDocument(SolrInputDocument doc, FullTextContentStreams streams)
|
||||
throws IOException, SolrServerException {
|
||||
final SolrClient solr = solrSearchCore.getSolr();
|
||||
if (solr != null) {
|
||||
// If full text stream(s) were passed in, we'll index them as part of the SolrInputDocument
|
||||
if (streams != null && !streams.isEmpty()) {
|
||||
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
|
||||
req.addContentStream(streams);
|
||||
// limit full text indexing to first 100,000 characters unless configured otherwise
|
||||
final int charLimit = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getIntProperty("discovery.solr.fulltext.charLimit",
|
||||
100000);
|
||||
|
||||
ModifiableSolrParams params = new ModifiableSolrParams();
|
||||
// Use Tika's Text parser as the streams are always from the TEXT bundle (i.e. already extracted text)
|
||||
// TODO: We may wish to consider using Tika to extract the text in the future.
|
||||
TextAndCSVParser tikaParser = new TextAndCSVParser();
|
||||
BodyContentHandler tikaHandler = new BodyContentHandler(charLimit);
|
||||
Metadata tikaMetadata = new Metadata();
|
||||
ParseContext tikaContext = new ParseContext();
|
||||
|
||||
//req.setParam(ExtractingParams.EXTRACT_ONLY, "true");
|
||||
for (String name : doc.getFieldNames()) {
|
||||
for (Object val : doc.getFieldValues(name)) {
|
||||
params.add(ExtractingParams.LITERALS_PREFIX + name, val.toString());
|
||||
// Use Apache Tika to parse the full text stream(s)
|
||||
try (InputStream fullTextStreams = streams.getStream()) {
|
||||
tikaParser.parse(fullTextStreams, tikaHandler, tikaMetadata, tikaContext);
|
||||
} catch (SAXException saxe) {
|
||||
// Check if this SAXException is just a notice that this file was longer than the character limit.
|
||||
// Unfortunately there is not a unique, public exception type to catch here. This error is thrown
|
||||
// by Tika's WriteOutContentHandler when it encounters a document longer than the char limit
|
||||
// https://github.com/apache/tika/blob/main/tika-core/src/main/java/org/apache/tika/sax/WriteOutContentHandler.java
|
||||
if (saxe.getMessage().contains("limit has been reached")) {
|
||||
// log that we only indexed up to that configured limit
|
||||
log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)."
|
||||
+ " Only the first {} characters were indexed.", charLimit);
|
||||
} else {
|
||||
throw new IOException("Tika parsing error. Could not index full text.", saxe);
|
||||
}
|
||||
} catch (TikaException ex) {
|
||||
throw new IOException("Tika parsing error. Could not index full text.", ex);
|
||||
}
|
||||
|
||||
// Write Tika metadata to "tika_meta_*" fields.
|
||||
// This metadata is not very useful right now, but we'll keep it just in case it becomes more useful.
|
||||
for (String name : tikaMetadata.names()) {
|
||||
for (String value : tikaMetadata.getValues(name)) {
|
||||
doc.addField("tika_meta_" + name, value);
|
||||
}
|
||||
}
|
||||
|
||||
req.setParams(params);
|
||||
req.setParam(ExtractingParams.UNKNOWN_FIELD_PREFIX, "attr_");
|
||||
req.setParam(ExtractingParams.MAP_PREFIX + "content", "fulltext");
|
||||
req.setParam(ExtractingParams.EXTRACT_FORMAT, "text");
|
||||
req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
|
||||
req.process(solr);
|
||||
} else {
|
||||
solr.add(doc);
|
||||
// Save (parsed) full text to "fulltext" field
|
||||
doc.addField("fulltext", tikaHandler.toString());
|
||||
}
|
||||
|
||||
// Add document to index
|
||||
solr.add(doc);
|
||||
}
|
||||
}
|
||||
|
||||
|
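For illustration, a stand-alone sketch of the Tika extraction pattern the rewritten writeDocument() relies on (the same TextAndCSVParser and BodyContentHandler with a character limit); the example class and method names are assumptions.

import java.io.IOException;
import java.io.InputStream;

import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.csv.TextAndCSVParser;
import org.apache.tika.sax.BodyContentHandler;
import org.xml.sax.SAXException;

public class TikaTextExtractionExample {
    /**
     * Parse a plain-text stream with a character limit. A SAXException whose message
     * says the write limit "has been reached" is treated as a soft truncation (the
     * handler already holds the first charLimit characters); any other SAXException
     * or TikaException is a real parsing failure.
     */
    public static String extract(InputStream in, int charLimit) throws IOException {
        TextAndCSVParser parser = new TextAndCSVParser();
        BodyContentHandler handler = new BodyContentHandler(charLimit);
        Metadata metadata = new Metadata();
        try {
            parser.parse(in, handler, metadata, new ParseContext());
        } catch (SAXException saxe) {
            if (saxe.getMessage() == null || !saxe.getMessage().contains("limit has been reached")) {
                throw new IOException("Tika parsing error. Could not extract text.", saxe);
            }
        } catch (TikaException ex) {
            throw new IOException("Tika parsing error. Could not extract text.", ex);
        }
        return handler.toString();
    }
}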
@@ -15,6 +15,7 @@ import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.content.InProgressSubmission;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.indexobject.factory.CollectionIndexFactory;
|
||||
import org.dspace.discovery.indexobject.factory.InprogressSubmissionIndexFactory;
|
||||
import org.dspace.discovery.indexobject.factory.ItemIndexFactory;
|
||||
@@ -47,7 +48,7 @@ public abstract class InprogressSubmissionIndexFactoryImpl
|
||||
|
||||
@Override
|
||||
public void storeInprogressItemFields(Context context, SolrInputDocument doc,
|
||||
InProgressSubmission inProgressSubmission) throws SQLException {
|
||||
InProgressSubmission inProgressSubmission) throws SQLException, IOException {
|
||||
final Item item = inProgressSubmission.getItem();
|
||||
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
|
||||
EPerson submitter = inProgressSubmission.getSubmitter();
|
||||
@@ -61,6 +62,9 @@ public abstract class InprogressSubmissionIndexFactoryImpl
|
||||
// get the location string (for searching by collection & community)
|
||||
List<String> locations = indexableCollectionService.
|
||||
getCollectionLocations(context, inProgressSubmission.getCollection());
|
||||
|
||||
// Add item metadata
|
||||
indexableItemService.addDiscoveryFields(doc, context, item, SearchUtils.getAllDiscoveryConfigurations(item));
|
||||
indexableCollectionService.storeCommunityCollectionLocations(doc, locations);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -19,8 +19,6 @@ import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.IndexableObject;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfiguration;
|
||||
import org.dspace.discovery.indexobject.factory.WorkflowItemIndexFactory;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
|
||||
@@ -75,10 +73,6 @@ public class WorkflowItemIndexFactoryImpl
|
||||
final SolrInputDocument doc = super.buildDocument(context, indexableObject);
|
||||
final XmlWorkflowItem workflowItem = indexableObject.getIndexedObject();
|
||||
final Item item = workflowItem.getItem();
|
||||
// Add the item metadata as configured
|
||||
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils
|
||||
.getAllDiscoveryConfigurations(workflowItem);
|
||||
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
|
||||
|
||||
String acvalue = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getProperty("discovery.facet.namedtype.workflow.item");
|
||||
@@ -118,4 +112,4 @@ public class WorkflowItemIndexFactoryImpl
|
||||
final XmlWorkflowItem xmlWorkflowItem = workflowItemService.find(context, Integer.parseInt(id));
|
||||
return xmlWorkflowItem == null ? Optional.empty() : Optional.of(new IndexableWorkflowItem(xmlWorkflowItem));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -19,8 +19,6 @@ import org.apache.solr.common.SolrInputDocument;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.discovery.SearchUtils;
|
||||
import org.dspace.discovery.configuration.DiscoveryConfiguration;
|
||||
import org.dspace.discovery.indexobject.factory.WorkspaceItemIndexFactory;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
@@ -71,12 +69,6 @@ public class WorkspaceItemIndexFactoryImpl
|
||||
acvalue = indexableObject.getTypeText();
|
||||
}
|
||||
addNamedResourceTypeIndex(doc, acvalue);
|
||||
final WorkspaceItem inProgressSubmission = indexableObject.getIndexedObject();
|
||||
|
||||
// Add the item metadata as configured
|
||||
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils
|
||||
.getAllDiscoveryConfigurations(inProgressSubmission);
|
||||
indexableItemService.addDiscoveryFields(doc, context, inProgressSubmission.getItem(), discoveryConfigurations);
|
||||
|
||||
return doc;
|
||||
}
|
||||
@@ -96,4 +88,4 @@ public class WorkspaceItemIndexFactoryImpl
|
||||
final WorkspaceItem workspaceItem = workspaceItemService.find(context, Integer.parseInt(id));
|
||||
return workspaceItem == null ? Optional.empty() : Optional.of(new IndexableWorkspaceItem(workspaceItem));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -7,6 +7,7 @@
|
||||
*/
|
||||
package org.dspace.discovery.indexobject.factory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
@@ -31,5 +32,5 @@ public interface InprogressSubmissionIndexFactory<T extends IndexableInProgressS
|
||||
* @throws SQLException If database error
|
||||
*/
|
||||
void storeInprogressItemFields(Context context, SolrInputDocument doc, InProgressSubmission inProgressSubmission)
|
||||
throws SQLException;
|
||||
}
|
||||
throws SQLException, IOException;
|
||||
}
|
||||
|
@@ -51,24 +51,16 @@ import org.dspace.versioning.service.VersionHistoryService;
|
||||
import org.dspace.versioning.service.VersioningService;
|
||||
import org.dspace.workflow.WorkflowService;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.BasicWorkflowServiceImpl;
|
||||
import org.dspace.workflowbasic.factory.BasicWorkflowServiceFactory;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowService;
|
||||
import org.dspace.workflowbasic.service.TaskListItemService;
|
||||
import org.dspace.xmlworkflow.WorkflowConfigurationException;
|
||||
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.service.WorkflowRequirementsService;
|
||||
import org.dspace.xmlworkflow.service.XmlWorkflowService;
|
||||
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
|
||||
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
|
||||
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.CollectionRoleService;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.WorkflowItemRoleService;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
@@ -292,10 +284,6 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
if (constraintList.size() > 0) {
|
||||
// Check if the constraints we found should be deleted
|
||||
if (cascade) {
|
||||
boolean isBasicFramework = WorkflowServiceFactory.getInstance().getWorkflowService()
|
||||
instanceof BasicWorkflowService;
|
||||
boolean isXmlFramework = WorkflowServiceFactory.getInstance().getWorkflowService()
|
||||
instanceof XmlWorkflowService;
|
||||
Iterator<String> constraintsIterator = constraintList.iterator();
|
||||
|
||||
while (constraintsIterator.hasNext()) {
|
||||
@@ -332,10 +320,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
itemService.update(context, item);
|
||||
}
|
||||
}
|
||||
} else if (StringUtils.equals(tableName, "cwf_claimtask") && isXmlFramework) {
|
||||
} else if (StringUtils.equals(tableName, "cwf_claimtask")) {
|
||||
// Unclaim all XmlWorkflow tasks
|
||||
XmlWorkflowItemService xmlWorkflowItemService = XmlWorkflowServiceFactory
|
||||
.getInstance().getXmlWorkflowItemService();
|
||||
ClaimedTaskService claimedTaskService = XmlWorkflowServiceFactory
|
||||
.getInstance().getClaimedTaskService();
|
||||
XmlWorkflowService xmlWorkflowService = XmlWorkflowServiceFactory
|
||||
@@ -343,8 +329,6 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
WorkflowRequirementsService workflowRequirementsService = XmlWorkflowServiceFactory
|
||||
.getInstance().getWorkflowRequirementsService();
|
||||
|
||||
List<XmlWorkflowItem> xmlWorkflowItems = xmlWorkflowItemService
|
||||
.findBySubmitter(context, ePerson);
|
||||
List<ClaimedTask> claimedTasks = claimedTaskService.findByEperson(context, ePerson);
|
||||
|
||||
for (ClaimedTask task : claimedTasks) {
|
||||
@@ -360,43 +344,13 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
|
||||
.singletonList(tableName)));
|
||||
}
|
||||
}
|
||||
} else if (StringUtils.equals(tableName, "workflowitem") && isBasicFramework) {
|
||||
// Remove basicWorkflow workflowitem and unclaim them
|
||||
BasicWorkflowItemService basicWorkflowItemService = BasicWorkflowServiceFactory.getInstance()
|
||||
.getBasicWorkflowItemService();
|
||||
BasicWorkflowService basicWorkflowService = BasicWorkflowServiceFactory.getInstance()
|
||||
.getBasicWorkflowService();
|
||||
TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance()
|
||||
.getTaskListItemService();
|
||||
List<BasicWorkflowItem> workflowItems = basicWorkflowItemService.findByOwner(context, ePerson);
|
||||
for (BasicWorkflowItem workflowItem : workflowItems) {
|
||||
int state = workflowItem.getState();
|
||||
// unclaim tasks that are in the pool.
|
||||
if (state == BasicWorkflowServiceImpl.WFSTATE_STEP1
|
||||
|| state == BasicWorkflowServiceImpl.WFSTATE_STEP2
|
||||
|| state == BasicWorkflowServiceImpl.WFSTATE_STEP3) {
|
||||
log.info(LogManager.getHeader(context, "unclaim_workflow",
|
||||
"workflow_id=" + workflowItem.getID() + ", claiming EPerson is deleted"));
|
||||
basicWorkflowService.unclaim(context, workflowItem, context.getCurrentUser());
|
||||
// remove the EPerson from the list of persons that can (re-)claim the task
|
||||
// while we are doing it below, we must do this here as well as the previously
|
||||
// unclaimed tasks was put back into pool and we do not know the order the tables
|
||||
// are checked.
|
||||
taskListItemService.deleteByWorkflowItemAndEPerson(context, workflowItem, ePerson);
|
||||
}
|
||||
}
|
||||
} else if (StringUtils.equals(tableName, "resourcepolicy")) {
|
||||
// we delete the EPerson, it won't need any rights anymore.
|
||||
authorizeService.removeAllEPersonPolicies(context, ePerson);
|
||||
} else if (StringUtils.equals(tableName, "tasklistitem") && isBasicFramework) {
|
||||
// remove EPerson from the list of EPersons that may claim some specific workflow tasks.
|
||||
TaskListItemService taskListItemService = BasicWorkflowServiceFactory.getInstance()
|
||||
.getTaskListItemService();
|
||||
taskListItemService.deleteByEPerson(context, ePerson);
|
||||
} else if (StringUtils.equals(tableName, "cwf_pooltask") && isXmlFramework) {
|
||||
} else if (StringUtils.equals(tableName, "cwf_pooltask")) {
|
||||
PoolTaskService poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService();
|
||||
poolTaskService.deleteByEperson(context, ePerson);
|
||||
} else if (StringUtils.equals(tableName, "cwf_workflowitemrole") && isXmlFramework) {
|
||||
} else if (StringUtils.equals(tableName, "cwf_workflowitemrole")) {
|
||||
WorkflowItemRoleService workflowItemRoleService = XmlWorkflowServiceFactory.getInstance()
|
||||
.getWorkflowItemRoleService();
|
||||
workflowItemRoleService.deleteByEPerson(context, ePerson);
|
||||
|
@@ -227,8 +227,7 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
|
||||
* EPersons. Called by delete() to determine whether the eperson can
|
||||
* actually be deleted.
|
||||
*
|
||||
* An EPerson cannot be deleted if it exists in the item, workflowitem, or
|
||||
* tasklistitem tables.
|
||||
* An EPerson cannot be deleted if it exists in the item, resourcepolicy or workflow-related tables.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param ePerson EPerson to find
|
||||
|
dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalDataProvider.java (new file, 180 lines)
@@ -0,0 +1,180 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAJournal;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
|
||||
/**
|
||||
* This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External
|
||||
* data lookups.
|
||||
* This provider is a refactored version of SherpaJournalDataPublisher, rewritten to work with SHERPA v2 API
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAv2JournalDataProvider implements ExternalDataProvider {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SHERPAv2JournalDataProvider.class);
|
||||
|
||||
// Source identifier (configured in spring configuration)
|
||||
private String sourceIdentifier;
|
||||
|
||||
// SHERPA v2 API service (configured in spring configuration)
|
||||
SHERPAService sherpaService;
|
||||
|
||||
@Override
|
||||
public String getSourceIdentifier() {
|
||||
return sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise the client that we need to call the endpoint
|
||||
* @throws IOException If something goes wrong
|
||||
*/
|
||||
public void init() throws IOException {}
|
||||
|
||||
/**
|
||||
* Get a single journal based on a "title equals string" query
|
||||
* @param id The journal title which will be used as query string
|
||||
* @return external data object representing journal
|
||||
*/
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String id) {
|
||||
// Sanitise ID / title query string (strips some special characters)
|
||||
id = SHERPAUtils.sanitiseQuery(id);
|
||||
|
||||
// Perform request using the SHERPA service (first row only)
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "title", "equals", id,
|
||||
0, 1);
|
||||
|
||||
// If a journal was returned, get it and convert it to an ExternalDataObject
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
SHERPAJournal sherpaJournal = sherpaResponse.getJournals().get(0);
|
||||
|
||||
ExternalDataObject externalDataObject = constructExternalDataObjectFromSherpaJournal(sherpaJournal);
|
||||
return Optional.of(externalDataObject);
|
||||
}
|
||||
|
||||
// If no journal was returned, return an empty Optional object
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct ExternalDataObject populated with journal metadata from the SHERPA v2 API response
|
||||
* @param sherpaJournal
|
||||
* @return external data object representing a journal
|
||||
*/
|
||||
private ExternalDataObject constructExternalDataObjectFromSherpaJournal(SHERPAJournal sherpaJournal) {
|
||||
// Set up external object
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject();
|
||||
externalDataObject.setSource(sourceIdentifier);
|
||||
// Set journal title in external object
|
||||
if (CollectionUtils.isNotEmpty(sherpaJournal.getTitles())) {
|
||||
String journalTitle = sherpaJournal.getTitles().get(0);
|
||||
externalDataObject.setId(sherpaJournal.getTitles().get(0));
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "title", null, null, journalTitle));
|
||||
externalDataObject.setValue(journalTitle);
|
||||
externalDataObject.setDisplayValue(journalTitle);
|
||||
}
|
||||
// Set ISSNs in external object
|
||||
if (CollectionUtils.isNotEmpty(sherpaJournal.getIssns())) {
|
||||
String issn = sherpaJournal.getIssns().get(0);
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "issn", null, issn));
|
||||
|
||||
}
|
||||
|
||||
return externalDataObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search SHERPA v2 API for journal results based on a 'contains word' query
|
||||
* @param query The query for the search
|
||||
* @param start The start of the search
|
||||
* @param limit The max amount of records to be returned by the search
|
||||
* @return a list of external data objects
|
||||
*/
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
// Search SHERPA for journals with the query term in the title
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "title",
|
||||
"contains word", query, start, limit);
|
||||
|
||||
// Convert SHERPA response to a Collection and return the list
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
List<ExternalDataObject> list = sherpaResponse.getJournals().stream().map(
|
||||
sherpaJournal -> constructExternalDataObjectFromSherpaJournal(sherpaJournal)).collect(
|
||||
Collectors.toList());
|
||||
|
||||
// Unlike the previous API version we can request offset and limit, so no need to build a
|
||||
// sublist from this list, we can just return the list.
|
||||
return list;
|
||||
}
|
||||
|
||||
// If nothing has been returned yet, return an empty list
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(String source) {
|
||||
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get number of results returned from a SHERPA v2 publication search
|
||||
* @param query The query to search on, used to give the total number of results
|
||||
* @return int representing number of journal results
|
||||
*/
|
||||
@Override
|
||||
public int getNumberOfResults(String query) {
|
||||
// Search SHERPA for journals with the query term in the title
|
||||
// Limit = 0 means the limit parameter will not be added to the API query
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest("publication", "title",
|
||||
"contains word", query, 0, 0);
|
||||
|
||||
// Get number of journals returned in response
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
return sherpaResponse.getJournals().size();
|
||||
}
|
||||
|
||||
// If other checks have failed return 0
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the sourceIdentifier
|
||||
* @param sourceIdentifier The sourceIdentifier to be set on this SHERPAv2JournalDataProvider
|
||||
*/
|
||||
public void setSourceIdentifier(String sourceIdentifier) {
|
||||
this.sourceIdentifier = sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the SHERPA service
|
||||
* @param sherpaService The sherpaService to be set on this SHERPAv2JournalDataProvider
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
|
||||
}
|
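For illustration, a minimal sketch of calling the new provider once it has been wired up via Spring; the helper class, the title argument, and the getDisplayValue() accessor are assumptions based on the setters used above.

import java.util.Optional;

import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.impl.SHERPAv2JournalDataProvider;

public class SherpaJournalLookupExample {
    /**
     * Look up a journal by exact title and print its display value if SHERPA returns a match.
     * The provider is expected to arrive fully configured (sourceIdentifier and sherpaService set).
     */
    public static void printJournal(SHERPAv2JournalDataProvider provider, String title) {
        Optional<ExternalDataObject> journal = provider.getExternalDataObject(title);
        journal.ifPresent(obj -> System.out.println(obj.getDisplayValue()));
    }
}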
dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2JournalISSNDataProvider.java (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAJournal;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
|
||||
/**
|
||||
* This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External
|
||||
* data lookups based on ISSN (to match functionality offered by the legacy SHERPASubmitService for policy lookups
* at the time of submission).
* This provider is a refactored version of SherpaJournalDataProvider, rewritten to work with the SHERPA v2 API
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAv2JournalISSNDataProvider implements ExternalDataProvider {
|
||||
|
||||
private static final Logger log =
|
||||
org.apache.logging.log4j.LogManager.getLogger(
|
||||
org.dspace.external.provider.impl.SHERPAv2JournalISSNDataProvider.class);
|
||||
|
||||
// Source identifier (configured in spring configuration)
|
||||
private String sourceIdentifier;
|
||||
|
||||
// SHERPA v2 API service (configured in spring configuration)
|
||||
SHERPAService sherpaService;
|
||||
|
||||
@Override
|
||||
public String getSourceIdentifier() {
|
||||
return sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise the provider - this no longer starts client since that is handled by SHERPAService
|
||||
* @throws IOException If something goes wrong
|
||||
*/
|
||||
public void init() throws IOException {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single journal based on an "issn equals string" query
|
||||
* @param issn The ISSN which will be used as query string
|
||||
* @return external data object representing journal
|
||||
*/
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String issn) {
|
||||
|
||||
// Sanitise ID / title query string (strips some special characters)
|
||||
issn = SHERPAUtils.sanitiseQuery(issn);
|
||||
|
||||
log.debug("Searching SHERPA for ISSN: " + issn);
|
||||
|
||||
// Get SHERPA response from the API for all objects matching this ISSN
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest(
|
||||
"publication", "issn", "equals", issn, 0, 1);
|
||||
|
||||
// Construct external data objects
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
SHERPAJournal sherpaJournal = sherpaResponse.getJournals().get(0);
|
||||
ExternalDataObject externalDataObject = constructExternalDataObjectFromSherpaJournal(sherpaJournal);
|
||||
return Optional.of(externalDataObject);
|
||||
}
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct ExternalDataObject populated with journal metadata from the SHERPA v2 API response
|
||||
* @param sherpaJournal the SHERPA journal object parsed from the API response
|
||||
* @return external data object representing a journal
|
||||
*/
|
||||
private ExternalDataObject constructExternalDataObjectFromSherpaJournal(SHERPAJournal sherpaJournal) {
|
||||
// Set up external object
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject();
|
||||
externalDataObject.setSource(sourceIdentifier);
|
||||
// Set journal title in external object
|
||||
if (CollectionUtils.isNotEmpty(sherpaJournal.getTitles())) {
|
||||
String journalTitle = sherpaJournal.getTitles().get(0);
|
||||
externalDataObject.setId(sherpaJournal.getTitles().get(0));
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "title", null, null, journalTitle));
|
||||
externalDataObject.setValue(journalTitle);
|
||||
externalDataObject.setDisplayValue(journalTitle);
|
||||
}
|
||||
// Set ISSNs in external object
|
||||
if (CollectionUtils.isNotEmpty(sherpaJournal.getIssns())) {
|
||||
String issn = sherpaJournal.getIssns().get(0);
|
||||
externalDataObject.setId(issn);
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "issn", null, issn));
|
||||
|
||||
}
|
||||
|
||||
log.debug("New external data object. Title=" + externalDataObject.getValue() + ". ID="
|
||||
+ externalDataObject.getId());
|
||||
|
||||
return externalDataObject;
|
||||
}
|
||||
|
||||
/**
|
||||
* Search SHERPA v2 API for journal results based on an 'issn equals' query
|
||||
* @param query The term to query for the search
|
||||
* @param start The start of the search
|
||||
* @param limit The max amount of records to be returned by the search
|
||||
* @return a list of external data objects
|
||||
*/
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
|
||||
// Get SHERPA response from the API for all objects matching this ISSN
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest(
|
||||
"publication", "issn", "equals", query, start, limit);
|
||||
|
||||
// Construct a list of external data objects and return it
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
log.debug("Found " + sherpaResponse.getJournals().size() + " matching journals for ISSN " + query);
|
||||
List<ExternalDataObject> list = sherpaResponse.getJournals().stream().map(
|
||||
sherpaJournal -> constructExternalDataObjectFromSherpaJournal(sherpaJournal)).collect(
|
||||
Collectors.toList());
|
||||
|
||||
// Unlike the previous API version we can request offset and limit, so no need to build a
// sublist from this list, we can just return the list.
|
||||
return list;
|
||||
} else {
|
||||
log.debug("Empty response from SHERPA v2 API for ISSN " + query);
|
||||
}
|
||||
|
||||
// If nothing was returned from the response, return an empty list
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(String source) {
|
||||
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get number of results returned from a SHERPA v2 publication search
|
||||
* @param issn The ISSN to search on, used to determine the total number of results
|
||||
* @return int representing number of journal results
|
||||
*/
|
||||
@Override
|
||||
public int getNumberOfResults(String issn) {
|
||||
|
||||
// Get SHERPA response from the API for all objects matching this ISSN.
|
||||
// The limit of 0 means a limit parameter will not be added to the API query
|
||||
SHERPAResponse sherpaResponse = sherpaService.performRequest(
|
||||
"publication", "issn", "equals", issn, 0, 0);
|
||||
|
||||
// Return the size of the journal collection
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
return sherpaResponse.getJournals().size();
|
||||
}
|
||||
|
||||
// If other checks have failed return 0
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the sourceIdentifier
|
||||
* @param sourceIdentifier The sourceIdentifier to be set on this SHERPAv2JournalISSNDataProvider
|
||||
*/
|
||||
public void setSourceIdentifier(String sourceIdentifier) {
|
||||
this.sourceIdentifier = sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the SHERPA Service
|
||||
* @param sherpaService The SHERPA service to be set on this SHERPAv2JournalISSNDataProvider
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
}
|
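A hedged usage sketch for the ISSN provider above; the source identifier, the ISSN value and the direct instantiation are illustrative, since in DSpace both the provider and SHERPAService are defined as Spring beans.

import java.util.Optional;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.impl.SHERPAv2JournalISSNDataProvider;

public class IssnLookupSketch {
    public static void main(String[] args) throws Exception {
        SHERPAv2JournalISSNDataProvider provider = new SHERPAv2JournalISSNDataProvider();
        provider.setSourceIdentifier("sherpaJournalIssn"); // illustrative identifier
        provider.setSherpaService(new SHERPAService());    // normally injected via Spring
        provider.init();

        // "0028-0836" is only an example ISSN
        Optional<ExternalDataObject> journal = provider.getExternalDataObject("0028-0836");
        journal.ifPresent(j -> System.out.println(j.getValue() + " / " + j.getId()));
    }
}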
dspace-api/src/main/java/org/dspace/external/provider/impl/SHERPAv2PublisherDataProvider.java (new vendored file, 180 lines)
@@ -0,0 +1,180 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAService;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisher;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
|
||||
/**
|
||||
* This class is the implementation of the ExternalDataProvider interface that will deal with SHERPAPublisher External
|
||||
* data lookups.
|
||||
* This provider is a refactored version of SherpaPublisherDataProvider, rewritten to work with the SHERPA v2 API
|
||||
*
|
||||
* It uses a simpler response object than the normal publication / policy search
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class SHERPAv2PublisherDataProvider implements ExternalDataProvider {
|
||||
|
||||
// Logger
|
||||
private static final Logger log =
|
||||
org.apache.logging.log4j.LogManager.getLogger(SHERPAv2PublisherDataProvider.class);
|
||||
|
||||
// Source identifier (eg 'sherpaPublisher') configured in spring configuration
|
||||
private String sourceIdentifier;
|
||||
// SHERPA service configured in spring configuration
|
||||
private SHERPAService sherpaService;
|
||||
|
||||
@Override
|
||||
public String getSourceIdentifier() {
|
||||
return sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise the client that we need to call the endpoint
|
||||
* @throws IOException If something goes wrong
|
||||
*/
|
||||
public void init() throws IOException {}
|
||||
|
||||
/**
|
||||
* Get a single publisher based on an "id equals string" query
|
||||
* @param id The publisher ID which will be used as query string
|
||||
* @return external data object representing publisher
|
||||
*/
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String id) {
|
||||
// Sanitise the given ID / title query
|
||||
id = SHERPAUtils.sanitiseQuery(id);
|
||||
|
||||
// Search for publishers matching this ID in SHERPA. Limit to 1 result since this is for a single object
|
||||
SHERPAPublisherResponse sherpaResponse =
|
||||
sherpaService.performPublisherRequest("publisher", "id", "equals", id, 0, 1);
|
||||
|
||||
// If there is at least one publisher, retrieve it and transform it to an ExternalDataObject
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
|
||||
SHERPAPublisher sherpaPublisher = sherpaResponse.getPublishers().get(0);
|
||||
// Construct external data object from returned publisher
|
||||
ExternalDataObject externalDataObject = constructExternalDataObjectFromSherpaPublisher(sherpaPublisher);
|
||||
return Optional.of(externalDataObject);
|
||||
}
|
||||
return Optional.empty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Search SHERPA v2 API for publisher results based on a 'contains word' query for publisher name
|
||||
* @param query The query for the search
|
||||
* @param start The start of the search
|
||||
* @param limit The max amount of records to be returned by the search
|
||||
* @return a list of external data objects
|
||||
*/
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
// Search SHERPA for publishers with the query term in the title (name)
|
||||
SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest(
|
||||
"publication", "title", "contains word", query, start, limit);
|
||||
|
||||
// If at least one publisher was found, convert to a list of ExternalDataObjects and return
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
|
||||
List<ExternalDataObject> list = sherpaResponse.getPublishers().stream().map(
|
||||
sherpaPublisher -> constructExternalDataObjectFromSherpaPublisher(sherpaPublisher)).collect(
|
||||
Collectors.toList());
|
||||
|
||||
// Unlike the previous API version we can request offset and limit, so no need to build a
|
||||
// sublist from this list, we can just return the list.
|
||||
return list;
|
||||
}
|
||||
|
||||
// Return an empty list if nothing was found
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private ExternalDataObject constructExternalDataObjectFromSherpaPublisher(SHERPAPublisher sherpaPublisher) {
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject();
|
||||
externalDataObject.setSource(sourceIdentifier);
|
||||
|
||||
// Set publisher name
|
||||
if (StringUtils.isNotBlank(sherpaPublisher.getName())) {
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "title", null, null, sherpaPublisher.getName()));
|
||||
externalDataObject.setDisplayValue(sherpaPublisher.getName());
|
||||
externalDataObject.setValue(sherpaPublisher.getName());
|
||||
}
|
||||
// Set publisher ID
|
||||
if (StringUtils.isNotBlank(sherpaPublisher.getIdentifier())) {
|
||||
externalDataObject.setId(sherpaPublisher.getIdentifier());
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "sherpaPublisher", null,
|
||||
sherpaPublisher.getIdentifier()));
|
||||
}
|
||||
|
||||
// Set home URL
|
||||
if (StringUtils.isNotBlank(sherpaPublisher.getUri())) {
|
||||
externalDataObject.addMetadata(new MetadataValueDTO(
|
||||
"dc", "identifier", "other", null, sherpaPublisher.getUri()));
|
||||
}
|
||||
|
||||
return externalDataObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(String source) {
|
||||
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get number of results returned from a SHERPA v2 publication search
|
||||
* @param query The query to search on, used to determine the total number of results
|
||||
* @return int representing number of publisher results
|
||||
*/
|
||||
@Override
|
||||
public int getNumberOfResults(String query) {
|
||||
// Search SHERPA for publishers with the query term in the title (name)
|
||||
// a limit of 0 means the limit parameter won't be added to the API query
|
||||
SHERPAPublisherResponse sherpaResponse = sherpaService.performPublisherRequest(
|
||||
"publication", "title", "contains word", query, 0, 0);
|
||||
|
||||
// Return the number of publishers in the response object
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getPublishers())) {
|
||||
return sherpaResponse.getPublishers().size();
|
||||
}
|
||||
|
||||
// If other checks have failed return 0
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the sourceIdentifier
|
||||
* @param sourceIdentifier The sourceIdentifier to be set on this SHERPAv2PublisherDataProvider
|
||||
*/
|
||||
public void setSourceIdentifier(String sourceIdentifier) {
|
||||
this.sourceIdentifier = sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the SHERPA Service
|
||||
* @param sherpaService The SHERPA service to be set on this SHERPAv2PublisherDataProvider
|
||||
*/
|
||||
public void setSherpaService(SHERPAService sherpaService) {
|
||||
this.sherpaService = sherpaService;
|
||||
}
|
||||
|
||||
}
|
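And a comparable hedged sketch for the publisher provider; the 'sherpaPublisher' identifier matches the example given in the comments above, while the query value and direct instantiation are illustrative.

import java.util.List;
import org.dspace.app.sherpa.SHERPAService;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider;

public class PublisherSearchSketch {
    public static void main(String[] args) throws Exception {
        SHERPAv2PublisherDataProvider provider = new SHERPAv2PublisherDataProvider();
        provider.setSourceIdentifier("sherpaPublisher"); // example identifier from the comments above
        provider.setSherpaService(new SHERPAService()); // normally injected via Spring
        provider.init();

        // "Elsevier" is only an example query; the provider runs a 'contains word' search on publisher names
        List<ExternalDataObject> publishers = provider.searchExternalDataObjects("Elsevier", 0, 20);
        for (ExternalDataObject publisher : publishers) {
            System.out.println(publisher.getValue() + " / " + publisher.getId());
        }
    }
}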
@@ -1,232 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.apache.http.client.utils.URLEncodedUtils;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAJournal;
|
||||
import org.dspace.app.sherpa.SHERPAResponse;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
|
||||
/**
|
||||
* This class is the implementation of the ExternalDataProvider interface that
|
||||
* will deal with SherpaJournal External data lookups.
|
||||
*/
|
||||
public class SherpaJournalDataProvider implements ExternalDataProvider {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SherpaJournalDataProvider.class);
|
||||
|
||||
private String url;
|
||||
private String sourceIdentifier;
|
||||
private String apiKey;
|
||||
|
||||
private CloseableHttpClient client = null;
|
||||
|
||||
@Override
|
||||
public String getSourceIdentifier() {
|
||||
return sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialise the client that we need to call the endpoint
|
||||
* @throws IOException If something goes wrong
|
||||
*/
|
||||
public void init() throws IOException {
|
||||
HttpClientBuilder builder = HttpClientBuilder.create();
|
||||
// httpclient 4.3+ doesn't appear to have any sensible defaults any more. Setting conservative defaults as
|
||||
// not to hammer the SHERPA service too much.
|
||||
client = builder
|
||||
.disableAutomaticRetries()
|
||||
.setMaxConnTotal(5)
|
||||
.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String id) {
|
||||
|
||||
HttpGet method = null;
|
||||
SHERPAResponse sherpaResponse = null;
|
||||
int timeout = 5000;
|
||||
URIBuilder uriBuilder = null;
|
||||
try {
|
||||
uriBuilder = new URIBuilder(url);
|
||||
uriBuilder.addParameter("jtitle", id);
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
uriBuilder.addParameter("ak", apiKey);
|
||||
}
|
||||
|
||||
method = new HttpGet(uriBuilder.build());
|
||||
method.setConfig(RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(timeout)
|
||||
.setConnectTimeout(timeout)
|
||||
.setSocketTimeout(timeout)
|
||||
.build());
|
||||
// Execute the method.
|
||||
|
||||
HttpResponse response = client.execute(method);
|
||||
int statusCode = response.getStatusLine().getStatusCode();
|
||||
|
||||
if (statusCode != HttpStatus.SC_OK) {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO return not OK status: "
|
||||
+ statusCode);
|
||||
}
|
||||
|
||||
HttpEntity responseBody = response.getEntity();
|
||||
|
||||
if (null != responseBody) {
|
||||
sherpaResponse = new SHERPAResponse(responseBody.getContent());
|
||||
} else {
|
||||
sherpaResponse = new SHERPAResponse("SHERPA/RoMEO returned no response");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
}
|
||||
|
||||
if (sherpaResponse == null) {
|
||||
sherpaResponse = new SHERPAResponse(
|
||||
"Error processing the SHERPA/RoMEO answer");
|
||||
}
|
||||
if (CollectionUtils.isNotEmpty(sherpaResponse.getJournals())) {
|
||||
SHERPAJournal sherpaJournal = sherpaResponse.getJournals().get(0);
|
||||
|
||||
ExternalDataObject externalDataObject = constructExternalDataObjectFromSherpaJournal(sherpaJournal);
|
||||
return Optional.of(externalDataObject);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private ExternalDataObject constructExternalDataObjectFromSherpaJournal(SHERPAJournal sherpaJournal) {
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject();
|
||||
externalDataObject.setSource(sourceIdentifier);
|
||||
externalDataObject.setId(sherpaJournal.getTitle());
|
||||
externalDataObject
|
||||
.addMetadata(new MetadataValueDTO("dc", "title", null, null, sherpaJournal.getTitle()));
|
||||
externalDataObject
|
||||
.addMetadata(new MetadataValueDTO("dc", "identifier", "issn", null, sherpaJournal.getIssn()));
|
||||
externalDataObject.setValue(sherpaJournal.getTitle());
|
||||
externalDataObject.setDisplayValue(sherpaJournal.getTitle());
|
||||
return externalDataObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
// query args to add to SHERPA/RoMEO request URL
|
||||
HttpGet get = constructHttpGet(query);
|
||||
try ( CloseableHttpClient hc = HttpClientBuilder.create().build(); ) {
|
||||
HttpResponse response = hc.execute(get);
|
||||
if (response.getStatusLine().getStatusCode() == 200) {
|
||||
|
||||
SHERPAResponse sherpaResponse = new SHERPAResponse(response.getEntity().getContent());
|
||||
List<ExternalDataObject> list = sherpaResponse.getJournals().stream().map(
|
||||
sherpaJournal -> constructExternalDataObjectFromSherpaJournal(sherpaJournal)).collect(
|
||||
Collectors.toList());
|
||||
// This is because Sherpa returns everything by default so we can't specify a start and limit
// in the query itself
|
||||
return list.subList(start, Math.min(start + limit, list.size()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
return Collections.emptyList();
|
||||
} finally {
|
||||
get.releaseConnection();
|
||||
}
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private HttpGet constructHttpGet(String query) {
|
||||
List<BasicNameValuePair> args = new ArrayList<>();
|
||||
args.add(new BasicNameValuePair("jtitle", query));
|
||||
args.add(new BasicNameValuePair("qtype", "contains"));
|
||||
args.add(new BasicNameValuePair("ak", apiKey));
|
||||
String srUrl = url + "?" + URLEncodedUtils.format(args, "UTF8");
|
||||
return new HttpGet(srUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(String source) {
|
||||
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getNumberOfResults(String query) {
|
||||
HttpGet get = constructHttpGet(query);
|
||||
try ( CloseableHttpClient hc = HttpClientBuilder.create().build(); ) {
|
||||
HttpResponse response = hc.execute(get);
|
||||
if (response.getStatusLine().getStatusCode() == 200) {
|
||||
SHERPAResponse sherpaResponse = new SHERPAResponse(response.getEntity().getContent());
|
||||
return sherpaResponse.getNumHits();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
return 0;
|
||||
} finally {
|
||||
get.releaseConnection();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the sourceIdentifier
|
||||
* @param sourceIdentifier The sourceIdentifier to be set on this SherpaJournalDataProvider
|
||||
*/
|
||||
public void setSourceIdentifier(String sourceIdentifier) {
|
||||
this.sourceIdentifier = sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the url
|
||||
* @return the url value of this SherpaJournalDataProvider
|
||||
*/
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the url
|
||||
* @param url The url to be set on this SherpaJournalDataProvider
|
||||
*/
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the apiKey
|
||||
* @return the apiKey value of this SherpaJournalDataProvider
|
||||
*/
|
||||
public String getApiKey() {
|
||||
return apiKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the apiKey
|
||||
* @param apiKey The apiKey to be set on this SherpaJournalDataProvider
|
||||
*/
|
||||
public void setApiKey(String apiKey) {
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
}
|
@@ -1,192 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.external.provider.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.utils.URLEncodedUtils;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.message.BasicNameValuePair;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.sherpa.SHERPAPublisher;
|
||||
import org.dspace.app.sherpa.SHERPAResponse;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
|
||||
/**
|
||||
* This class is the implementation of the ExternalDataProvider interface that
|
||||
* will deal with SherpaPublisher External data lookups.
|
||||
*/
|
||||
public class SherpaPublisherDataProvider implements ExternalDataProvider {
|
||||
|
||||
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SherpaPublisherDataProvider.class);
|
||||
|
||||
private String sourceIdentifier;
|
||||
private String url;
|
||||
private String apiKey;
|
||||
|
||||
@Override
|
||||
public String getSourceIdentifier() {
|
||||
return sourceIdentifier;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ExternalDataObject> getExternalDataObject(String id) {
|
||||
List<BasicNameValuePair> args = new ArrayList<>();
|
||||
args.add(new BasicNameValuePair("id", id));
|
||||
args.add(new BasicNameValuePair("ak", apiKey));
|
||||
String srUrl = url + "?" + URLEncodedUtils.format(args, "UTF8");
|
||||
HttpGet get = new HttpGet(srUrl);
|
||||
try ( CloseableHttpClient hc = HttpClientBuilder.create().build(); ) {
|
||||
HttpResponse response = hc.execute(get);
|
||||
if (response.getStatusLine().getStatusCode() == 200) {
|
||||
SHERPAResponse sherpaResponse = new SHERPAResponse(response.getEntity().getContent());
|
||||
List<SHERPAPublisher> list = sherpaResponse.getPublishers();
|
||||
if (CollectionUtils.isNotEmpty(list)) {
|
||||
return Optional.of(constructExternalDataObjectFromSherpaPublisher(list.get(0)));
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
return null;
|
||||
} finally {
|
||||
get.releaseConnection();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
|
||||
HttpGet get = constructHttpGet(query);
|
||||
try ( CloseableHttpClient hc = HttpClientBuilder.create().build(); ) {
|
||||
HttpResponse response = hc.execute(get);
|
||||
if (response.getStatusLine().getStatusCode() == 200) {
|
||||
SHERPAResponse sherpaResponse = new SHERPAResponse(response.getEntity().getContent());
|
||||
List<ExternalDataObject> list = sherpaResponse.getPublishers().stream().map(
|
||||
sherpaPublisher -> constructExternalDataObjectFromSherpaPublisher(sherpaPublisher)).collect(
|
||||
Collectors.toList());
|
||||
|
||||
// This is because Sherpa returns everything by default so we can't specify a start and limit
// in the query itself
|
||||
return list.subList(start, Math.min(start + limit, list.size()));
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
return Collections.emptyList();
|
||||
} finally {
|
||||
get.releaseConnection();
|
||||
}
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private HttpGet constructHttpGet(String query) {
|
||||
List<BasicNameValuePair> args = new ArrayList<>();
|
||||
args.add(new BasicNameValuePair("pub", query));
|
||||
args.add(new BasicNameValuePair("qtype", "all"));
|
||||
args.add(new BasicNameValuePair("ak", apiKey));
|
||||
String srUrl = url + "?" + URLEncodedUtils.format(args, "UTF8");
|
||||
return new HttpGet(srUrl);
|
||||
}
|
||||
|
||||
private ExternalDataObject constructExternalDataObjectFromSherpaPublisher(SHERPAPublisher sherpaPublisher) {
|
||||
ExternalDataObject externalDataObject = new ExternalDataObject();
|
||||
externalDataObject.setSource(sourceIdentifier);
|
||||
|
||||
//Text value == name
|
||||
externalDataObject.addMetadata(new MetadataValueDTO("dc", "title", null, null, sherpaPublisher.getName()));
|
||||
externalDataObject.setDisplayValue(sherpaPublisher.getName());
|
||||
externalDataObject.setValue(sherpaPublisher.getName());
|
||||
if (StringUtils.isNotBlank(sherpaPublisher.getId())) {
|
||||
externalDataObject.setId(sherpaPublisher.getId());
|
||||
externalDataObject
|
||||
.addMetadata(
|
||||
new MetadataValueDTO("dc", "identifier", "sherpaPublisher", null, sherpaPublisher.getId()));
|
||||
}
|
||||
|
||||
//Text value == homeurl
|
||||
externalDataObject
|
||||
.addMetadata(new MetadataValueDTO("dc", "identifier", "other", null, sherpaPublisher.getHomeurl()));
|
||||
|
||||
return externalDataObject;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supports(String source) {
|
||||
return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getNumberOfResults(String query) {
|
||||
HttpGet get = constructHttpGet(query);
|
||||
try ( CloseableHttpClient hc = HttpClientBuilder.create().build(); ) {
|
||||
HttpResponse response = hc.execute(get);
|
||||
if (response.getStatusLine().getStatusCode() == 200) {
|
||||
|
||||
SHERPAResponse sherpaResponse = new SHERPAResponse(response.getEntity().getContent());
|
||||
return sherpaResponse.getNumHits();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("SHERPA/RoMEO query failed: ", e);
|
||||
return 0;
|
||||
} finally {
|
||||
get.releaseConnection();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the sourceIdentifier
|
||||
* @param sourceIdentifier The sourceIdentifier to be set on this SherpaPublisherDataProvider
|
||||
*/
|
||||
public void setSourceIdentifier(String sourceIdentifier) {
|
||||
this.sourceIdentifier = sourceIdentifier;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the url
|
||||
* @return the url value of this SherpaPublisherDataProvider
|
||||
*/
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the url
|
||||
* @param url The url to be set on this SherpaPublisherDataProvider
|
||||
*/
|
||||
public void setUrl(String url) {
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic getter for the apiKey
|
||||
* @return the apiKey value of this SherpaPublisherDataProvider
|
||||
*/
|
||||
public String getApiKey() {
|
||||
return apiKey;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic setter for the apiKey
|
||||
* @param apiKey The apiKey to be set on this SherpaPublisherDataProvider
|
||||
*/
|
||||
public void setApiKey(String apiKey) {
|
||||
this.apiKey = apiKey;
|
||||
}
|
||||
}
|
@@ -10,6 +10,8 @@ package org.dspace.handle;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
@@ -57,6 +59,13 @@ public class HandleServiceImpl implements HandleService {
|
||||
@Autowired
|
||||
protected SiteService siteService;
|
||||
|
||||
private static final Pattern[] IDENTIFIER_PATTERNS = {
|
||||
Pattern.compile("^hdl:(.*)$"),
|
||||
Pattern.compile("^info:hdl/(.*)$"),
|
||||
Pattern.compile("^https?://hdl\\.handle\\.net/(.*)$"),
|
||||
Pattern.compile("^https?://.+/handle/(.*)$")
|
||||
};
|
||||
|
||||
/**
|
||||
* Public Constructor
|
||||
*/
|
||||
@@ -376,4 +385,39 @@ public class HandleServiceImpl implements HandleService {
|
||||
public int countTotal(Context context) throws SQLException {
|
||||
return handleDAO.countRows(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String parseHandle(String identifier) {
|
||||
if (identifier == null) {
|
||||
return null;
|
||||
}
|
||||
if (identifier.startsWith(getPrefix() + "/")) {
|
||||
// prefix is the equivalent of 123456789 in 123456789/???; don't strip
|
||||
return identifier;
|
||||
}
|
||||
|
||||
String canonicalPrefix = configurationService.getProperty("handle.canonical.prefix");
|
||||
if (identifier.startsWith(canonicalPrefix + "/")) {
|
||||
// prefix is the equivalent of https://hdl.handle.net/ in https://hdl.handle.net/123456789/???; strip
|
||||
return StringUtils.stripStart(identifier, canonicalPrefix);
|
||||
}
|
||||
|
||||
for (Pattern pattern : IDENTIFIER_PATTERNS) {
|
||||
Matcher matcher = pattern.matcher(identifier);
|
||||
if (matcher.matches()) {
|
||||
return matcher.group(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Check additional prefixes supported in the config file
|
||||
String[] additionalPrefixes = configurationService.getArrayProperty("handle.additional.prefixes");
|
||||
for (String additionalPrefix : additionalPrefixes) {
|
||||
if (identifier.startsWith(additionalPrefix + "/")) {
|
||||
// prefix is the equivalent of 123456789 in 123456789/???; don't strip
|
||||
return identifier;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
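For orientation, a hedged illustration of what the new parseHandle method above is expected to return, assuming the configured prefix is 123456789, the canonical prefix is https://hdl.handle.net/, and no additional prefixes are configured; the handle values are examples only.

// handleService is an injected org.dspace.handle.service.HandleService
handleService.parseHandle("123456789/1");                        // "123456789/1" (already a raw handle, returned as-is)
handleService.parseHandle("hdl:123456789/1");                    // "123456789/1"
handleService.parseHandle("info:hdl/123456789/1");               // "123456789/1"
handleService.parseHandle("https://hdl.handle.net/123456789/1"); // "123456789/1"
handleService.parseHandle("not-a-handle");                       // null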
@@ -181,4 +181,15 @@ public interface HandleService {
|
||||
public void modifyHandleDSpaceObject(Context context, String handle, DSpaceObject newOwner) throws SQLException;
|
||||
|
||||
int countTotal(Context context) throws SQLException;
|
||||
|
||||
/**
|
||||
* Format a handle ~
|
||||
* - hdl:123456789/1 -> 123456789/1
|
||||
* - info:hdl/123456789/1 -> 123456789/1
|
||||
* - https://hdl.handle.net/123456789/1 -> 123456789/1
|
||||
*
|
||||
* @param identifier the handle identifier in any supported format
* @return the raw handle, or null if the identifier could not be parsed
|
||||
*/
|
||||
String parseHandle(String identifier);
|
||||
}
|
||||
|
@@ -33,8 +33,8 @@ import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.workflowbasic.factory.BasicWorkflowServiceFactory;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
|
||||
|
||||
/**
|
||||
* @author LINDAT/CLARIN dev team
|
||||
@@ -48,8 +48,8 @@ public class ItemCheck extends Check {
|
||||
private MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
|
||||
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
private WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
|
||||
private BasicWorkflowItemService basicWorkflowItemService =
|
||||
BasicWorkflowServiceFactory.getInstance().getBasicWorkflowItemService();
|
||||
private XmlWorkflowItemService workflowItemService =
|
||||
XmlWorkflowServiceFactory.getInstance().getXmlWorkflowItemService();
|
||||
private HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
|
||||
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
private GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
|
||||
@@ -95,7 +95,7 @@ public class ItemCheck extends Check {
|
||||
|
||||
ret += String.format(
|
||||
"\tWaiting for approval (workflow items): %d\n",
|
||||
basicWorkflowItemService.countTotal(context));
|
||||
workflowItemService.countAll(context));
|
||||
|
||||
} catch (SQLException e) {
|
||||
error(e);
|
||||
@@ -132,7 +132,7 @@ public class ItemCheck extends Check {
|
||||
sb.append(String.format("Count %-14s: %s\n", "Group",
|
||||
String.valueOf(groupService.countTotal(context))));
|
||||
sb.append(String.format("Count %-14s: %s\n", "BasicWorkflowItem",
|
||||
String.valueOf(basicWorkflowItemService.countTotal(context))));
|
||||
String.valueOf(workflowItemService.countAll(context))));
|
||||
sb.append(String.format("Count %-14s: %s\n", "WorkspaceItem",
|
||||
String.valueOf(workspaceItemService.countTotal(context))));
|
||||
return sb.toString();
|
||||
|
@@ -60,32 +60,7 @@ public class HandleIdentifierProvider extends IdentifierProvider {
|
||||
|
||||
@Override
|
||||
public boolean supports(String identifier) {
|
||||
String prefix = handleService.getPrefix();
|
||||
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getProperty("handle.canonical.prefix");
|
||||
if (identifier == null) {
|
||||
return false;
|
||||
}
|
||||
// return true if handle has valid starting pattern
|
||||
if (identifier.startsWith(prefix + "/")
|
||||
|| identifier.startsWith(canonicalPrefix)
|
||||
|| identifier.startsWith("hdl:")
|
||||
|| identifier.startsWith("info:hdl")
|
||||
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|
||||
|| identifier.matches("^https?://.+/handle/.*")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
//Check additional prefixes supported in the config file
|
||||
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getArrayProperty("handle.additional.prefixes");
|
||||
for (String additionalPrefix : additionalPrefixes) {
|
||||
if (identifier.startsWith(additionalPrefix + "/")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
return handleService.parseHandle(identifier) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -161,6 +136,7 @@ public class HandleIdentifierProvider extends IdentifierProvider {
|
||||
public DSpaceObject resolve(Context context, String identifier, String... attributes) {
|
||||
// We can do nothing with this, return null
|
||||
try {
|
||||
identifier = handleService.parseHandle(identifier);
|
||||
return handleService.resolveToObject(context, identifier);
|
||||
} catch (IllegalStateException | SQLException e) {
|
||||
log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier),
|
||||
|
@@ -78,33 +78,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider {
|
||||
|
||||
@Override
|
||||
public boolean supports(String identifier) {
|
||||
String prefix = handleService.getPrefix();
|
||||
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getProperty("handle.canonical.prefix");
|
||||
if (identifier == null) {
|
||||
return false;
|
||||
}
|
||||
// return true if handle has valid starting pattern
|
||||
if (identifier.startsWith(prefix + "/")
|
||||
|| identifier.startsWith(canonicalPrefix)
|
||||
|| identifier.startsWith("hdl:")
|
||||
|| identifier.startsWith("info:hdl")
|
||||
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|
||||
|| identifier.matches("^https?://.+/handle/.*")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
//Check additional prefixes supported in the config file
|
||||
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getArrayProperty("handle.additional.prefixes");
|
||||
for (String additionalPrefix : additionalPrefixes) {
|
||||
if (identifier.startsWith(additionalPrefix + "/")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, assume invalid handle
|
||||
return false;
|
||||
return handleService.parseHandle(identifier) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -310,6 +284,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider {
|
||||
public DSpaceObject resolve(Context context, String identifier, String... attributes) {
|
||||
// We can do nothing with this, return null
|
||||
try {
|
||||
identifier = handleService.parseHandle(identifier);
|
||||
return handleService.resolveToObject(context, identifier);
|
||||
} catch (IllegalStateException | SQLException e) {
|
||||
log.error(LogManager.getHeader(context, "Error while resolving handle to item", "handle: " + identifier),
|
||||
|
@@ -72,33 +72,7 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
|
||||
|
||||
@Override
|
||||
public boolean supports(String identifier) {
|
||||
String prefix = handleService.getPrefix();
|
||||
String canonicalPrefix = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getProperty("handle.canonical.prefix");
|
||||
if (identifier == null) {
|
||||
return false;
|
||||
}
|
||||
// return true if handle has valid starting pattern
|
||||
if (identifier.startsWith(prefix + "/")
|
||||
|| identifier.startsWith(canonicalPrefix)
|
||||
|| identifier.startsWith("hdl:")
|
||||
|| identifier.startsWith("info:hdl")
|
||||
|| identifier.matches("^https?://hdl\\.handle\\.net/.*")
|
||||
|| identifier.matches("^https?://.+/handle/.*")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
//Check additional prefixes supported in the config file
|
||||
String[] additionalPrefixes = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getArrayProperty("handle.additional.prefixes");
|
||||
for (String additionalPrefix : additionalPrefixes) {
|
||||
if (identifier.startsWith(additionalPrefix + "/")) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise, assume invalid handle
|
||||
return false;
|
||||
return handleService.parseHandle(identifier) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -13,14 +13,6 @@ This documentation explains the implementation of the importer framework using p
|
||||
The configuration done for pubmed specifically is located at pubmed-integration.xml in dspace/config/spring/api.
I will not go into detail about what exactly is configured for the pubmed integration, as it is simply a usage of the classes explained [here](../README.md)

# Additional Config <a name="Additional-config"></a> #

To be able to do the lookup for our configured import service, we need to know which URL to use to check for publications.
This is done by setting the `publication-lookup.url` property in `publication-lookup.cfg` to one of two settings:

- Setting the `publication-lookup.url` property to the address defined for the configured import service (PubmedImportService in this case) in the file `spring-dspace-addon-import-services.xml`. This will check this single configured URL for publications.
- Setting the `publication-lookup.url` property to an asterisk `*`. This will check all configured import services for the URLs on which to base the search for publications (see the example below).

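A minimal illustration of the second option; only the property name and the `*` value come from the text above, while the comments are illustrative:

```
# publication-lookup.cfg
# Search every configured import service for publications
publication-lookup.url = *
```
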
# Pubmed specific classes Config <a name="Pubmed-specific"></a> #
|
||||
|
||||
These classes are simply implementations based on the base classes defined in importer/external. They add characteristic behaviour for services/mapping for the pubmed-specific data.
|
||||
|
@@ -1,41 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.plugin;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Interface that must be implemented by any plugin wanting to be called at the
|
||||
* inception of the Collection home page (in HandleServlet). Classes that
|
||||
* implement the process method and appear in the configuration will be run
|
||||
* before the preparation of the collection home page has any chance
|
||||
* to continue its execution
|
||||
*
|
||||
* @author Richard Jones
|
||||
*/
|
||||
public interface CollectionHomeProcessor {
|
||||
/**
|
||||
* execute the process
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param request the HTTP request
|
||||
* @param response the HTTP response
|
||||
* @param collection the collection object whose home page we are on
|
||||
* @throws PluginException any particular problem with the plugin execution
|
||||
* @throws AuthorizeException Authorisation errors during plugin execution
|
||||
*/
|
||||
void process(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response, Collection collection)
|
||||
throws PluginException, AuthorizeException;
|
||||
|
||||
}
|
@@ -1,40 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.plugin;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Interface that must be implemented by any plugin wanting to be called at
|
||||
* the inception of the Community home page (in HandleServlet). Classes that implement the process method
|
||||
* and appear in the configuration will be run before the preparation of the community home page has any
|
||||
* chance to continue its execution
|
||||
*
|
||||
* @author Richard Jones
|
||||
*/
|
||||
public interface CommunityHomeProcessor {
|
||||
/**
|
||||
* execute the process
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param request the HTTP request
|
||||
* @param response the HTTP response
|
||||
* @param community The community object whose home page we are on
|
||||
* @throws PluginException any particular problem with the plugin execution
|
||||
* @throws AuthorizeException Authorisation errors during plugin execution
|
||||
*/
|
||||
void process(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response, Community community)
|
||||
throws PluginException, AuthorizeException;
|
||||
|
||||
}
|
@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.plugin;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Interface that must be implemented by any plugin wanting to be called at the
|
||||
* inception of the Item page (in HandleServlet). Classes that
|
||||
* implement the process method and appear in the configuration will be run
|
||||
* before the preparation of the item home page has any chance
|
||||
* to continue its execution. <b>Note that the plugin is executed also before
|
||||
* than the READ permission on the item is checked</b>
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public interface ItemHomeProcessor {
|
||||
/**
|
||||
* execute the process
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param request the HTTP request
|
||||
* @param response the HTTP response
|
||||
* @param item the item object whose home page we are on
|
||||
* @throws PluginException any particular problem with the plugin execution
|
||||
* @throws AuthorizeException Authorisation errors during plugin execution
|
||||
*/
|
||||
void process(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response, Item item)
|
||||
throws PluginException, AuthorizeException;
|
||||
|
||||
}
|
@@ -1,51 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.plugin;
|
||||
|
||||
/**
|
||||
* General exception class for all code that runs as a plugin in DSpace
|
||||
*
|
||||
* @author Richard Jones
|
||||
*/
|
||||
public class PluginException extends Exception {
|
||||
/**
|
||||
* basic constructor
|
||||
*/
|
||||
public PluginException() {
|
||||
super();
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an exception with the passed message
|
||||
*
|
||||
* @param message a message for the exception
|
||||
*/
|
||||
public PluginException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an exception with the passed message to encapsulate
|
||||
* the passed Throwable
|
||||
*
|
||||
* @param message a message for the exception
|
||||
* @param e throwable which triggered this exception
|
||||
*/
|
||||
public PluginException(String message, Throwable e) {
|
||||
super(message, e);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an exception to encapsulate the passed Throwable
|
||||
*
|
||||
* @param e the throwable which triggered this exception
|
||||
*/
|
||||
public PluginException(Throwable e) {
|
||||
super(e);
|
||||
}
|
||||
}
|
@@ -1,39 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.plugin;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.Context;
|
||||
|
||||
/**
|
||||
* Interface that must be implemented by any plugin wanting to be called at the
|
||||
* inception of the Site home page (in index.jsp "welcome servlet"). Classes that
|
||||
* implement the process method and appear in the configuration will be run
|
||||
* before the preparation of the home page has any chance
|
||||
* to continue its execution
|
||||
*
|
||||
* @author Andrea Bollini
|
||||
*/
|
||||
public interface SiteHomeProcessor {
|
||||
/**
|
||||
* execute the process
|
||||
*
|
||||
* @param context the DSpace context
|
||||
* @param request the HTTP request
|
||||
* @param response the HTTP response
|
||||
* @throws PluginException any particular problem with the plugin execution
|
||||
* @throws AuthorizeException Authorisation errors during plugin execution
|
||||
*/
|
||||
void process(Context context, HttpServletRequest request,
|
||||
HttpServletResponse response)
|
||||
throws PluginException, AuthorizeException;
|
||||
|
||||
}
|
@@ -201,14 +201,11 @@ public class DatabaseUtils {
|
||||
"You've specified to migrate your database ONLY to version " + migrationVersion + " " +
|
||||
"...");
|
||||
System.out.println(
|
||||
"\nWARNING: It is highly likely you will see errors in your logs when the Metadata");
|
||||
System.out.println(
|
||||
"or Bitstream Format Registry auto-update. This is because you are attempting to");
|
||||
System.out.println(
|
||||
"use an OLD version " + migrationVersion + " Database with a newer DSpace API. NEVER " +
|
||||
"do this in a");
|
||||
System.out.println(
|
||||
"PRODUCTION scenario. The resulting old DB is only useful for migration testing.\n");
|
||||
"\nWARNING: In this mode, we DISABLE all callbacks, which means that you will need " +
|
||||
"to manually update registries and manually run a reindex. This is because you " +
|
||||
"are attempting to use an OLD version (" + migrationVersion + ") Database with " +
|
||||
"a newer DSpace API. NEVER do this in a PRODUCTION scenario. The resulting " +
|
||||
"database is only useful for migration testing.\n");
|
||||
|
||||
System.out.print(
|
||||
"Are you SURE you only want to migrate your database to version " + migrationVersion
|
||||
@@ -647,7 +644,7 @@ public class DatabaseUtils {
|
||||
* @param datasource DataSource object (retrieved from DatabaseManager())
|
||||
* @param connection Database connection
|
||||
* @param targetVersion If specified, only migrate the database to a particular *version* of DSpace. This is
|
||||
* mostly just useful for testing.
|
||||
* just useful for testing migrations, and should NOT be used in Production.
|
||||
* If null, the database is migrated to the latest version.
|
||||
* @param outOfOrder If true, Flyway will run any lower version migrations that were previously "ignored".
|
||||
* If false, Flyway will only run new migrations with a higher version number.
|
||||
@@ -661,6 +658,10 @@ public class DatabaseUtils {
|
||||
throw new SQLException("The datasource is a null reference -- cannot continue.");
|
||||
}
|
||||
|
||||
// Whether to reindex all content in Solr after successfully updating database
|
||||
boolean reindexAfterUpdate = DSpaceServicesFactory.getInstance().getConfigurationService()
|
||||
.getBooleanProperty("discovery.autoReindex", true);
|
||||
|
||||
try {
|
||||
// Setup Flyway API against our database
|
||||
FluentConfiguration flywayConfiguration = setupFlyway(datasource);
|
||||
@@ -671,8 +672,14 @@ public class DatabaseUtils {
|
||||
|
||||
// If a target version was specified, tell Flyway to ONLY migrate to that version
|
||||
// (i.e. all later migrations are left as "pending"). By default we always migrate to latest version.
|
||||
// This mode is only useful for testing migrations & should NEVER be used in Production.
|
||||
if (!StringUtils.isBlank(targetVersion)) {
|
||||
flywayConfiguration.target(targetVersion);
|
||||
// Disable all callbacks. Most callbacks use the Context object which triggers a full database update,
|
||||
// bypassing this target version.
|
||||
flywayConfiguration.callbacks(new Callback[]{});
|
||||
// Also disable reindex after update for this migration mode (as reindex also uses Context object)
|
||||
reindexAfterUpdate = false;
|
||||
}
|
||||
|
||||
// Initialized Flyway object (will be created by flywayConfiguration.load() below)
|
||||
@@ -722,7 +729,7 @@ public class DatabaseUtils {
|
||||
flyway.migrate();
|
||||
|
||||
// Flag that Discovery will need reindexing, since database was updated
|
||||
setReindexDiscovery(true);
|
||||
setReindexDiscovery(reindexAfterUpdate);
|
||||
} else {
|
||||
log.info("DSpace database schema is up to date");
|
||||
}
|
||||
|
@@ -21,8 +21,6 @@ import org.dspace.content.NonUniqueMetadataException;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.service.XmlWorkflowService;
|
||||
import org.flywaydb.core.api.callback.Callback;
|
||||
import org.flywaydb.core.api.callback.Event;
|
||||
import org.slf4j.Logger;
|
||||
@@ -80,13 +78,9 @@ public class RegistryUpdater implements Callback {
|
||||
MetadataImporter.loadRegistry(base + namespaceFile, true);
|
||||
}
|
||||
|
||||
// Check if XML Workflow is enabled in workflow.cfg
|
||||
if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) {
|
||||
// If so, load in the workflow metadata types as well
|
||||
String workflowTypes = "workflow-types.xml";
|
||||
log.info("Reading {}", workflowTypes);
|
||||
MetadataImporter.loadRegistry(base + workflowTypes, true);
|
||||
}
|
||||
String workflowTypes = "workflow-types.xml";
|
||||
log.info("Reading {}", workflowTypes);
|
||||
MetadataImporter.loadRegistry(base + workflowTypes, true);
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
// Commit changes and close context
|
||||
|
@@ -8,8 +8,6 @@
|
||||
package org.dspace.storage.rdbms.migration;
|
||||
|
||||
import org.dspace.storage.rdbms.DatabaseUtils;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.service.XmlWorkflowService;
|
||||
import org.flywaydb.core.api.migration.BaseJavaMigration;
|
||||
import org.flywaydb.core.api.migration.Context;
|
||||
|
||||
@@ -26,24 +24,20 @@ public class V7_0_2018_04_03__Upgrade_Workflow_Policy extends BaseJavaMigration
|
||||
|
||||
@Override
|
||||
public void migrate(Context context) throws Exception {
|
||||
// Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service
|
||||
// is enabled.
|
||||
if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) {
|
||||
// Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) {
|
||||
String dbtype = DatabaseUtils.getDbType(context.getConnection());
|
||||
// Check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
if (DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) {
|
||||
String dbtype = DatabaseUtils.getDbType(context.getConnection());
|
||||
|
||||
String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/";
|
||||
String dataMigrateSQL = MigrationUtils.getResourceAsString(
|
||||
String sqlMigrationPath = "org/dspace/storage/rdbms/sqlmigration/workflow/" + dbtype + "/";
|
||||
String dataMigrateSQL = MigrationUtils.getResourceAsString(
|
||||
sqlMigrationPath + "xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql");
|
||||
|
||||
// Actually execute the Data migration SQL
|
||||
// This will migrate all existing traditional workflows to the new XMLWorkflow system & tables
|
||||
DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL);
|
||||
// Actually execute the Data migration SQL
|
||||
// This will migrate all existing traditional workflows to the new XMLWorkflow system & tables
|
||||
DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL);
|
||||
|
||||
// Assuming both succeeded, save the size of the scripts for getChecksum() below
|
||||
migration_file_size = dataMigrateSQL.length();
|
||||
}
|
||||
// Assuming both succeeded, save the size of the scripts for getChecksum() below
|
||||
migration_file_size = dataMigrateSQL.length();
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -12,8 +12,6 @@ import java.sql.SQLException;
|
||||
|
||||
import org.dspace.storage.rdbms.DatabaseUtils;
|
||||
import org.dspace.storage.rdbms.migration.MigrationUtils;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.service.XmlWorkflowService;
|
||||
import org.flywaydb.core.api.migration.BaseJavaMigration;
|
||||
import org.flywaydb.core.api.migration.Context;
|
||||
import org.slf4j.Logger;
|
||||
@@ -55,15 +53,12 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration
|
||||
@Override
|
||||
public void migrate(Context context)
|
||||
throws IOException, SQLException {
|
||||
// Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service
|
||||
// is enabled.
|
||||
if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService
|
||||
// If your database was upgraded to DSpace 6 prior to enabling XML Workflow, we MUST skip this 5.x
|
||||
// migration, as it is incompatible
|
||||
// with a 6.x database. In that scenario the corresponding 6.x XML Workflow migration will create
|
||||
// necessary tables.
|
||||
&& DatabaseUtils.getCurrentFlywayDSpaceState(context.getConnection()) < 6) {
|
||||
// Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
// If your database was upgraded to DSpace 6 prior to enabling XML Workflow, we MUST skip this 5.x
|
||||
// migration, as it is incompatible
|
||||
// with a 6.x database. In that scenario the corresponding 6.x XML Workflow migration will create
|
||||
// necessary tables.
|
||||
if (DatabaseUtils.getCurrentFlywayDSpaceState(context.getConnection()) < 6) {
|
||||
// Check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
// If XMLWorkflow Table does NOT exist in this database, then lets do the migration!
|
||||
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql
|
||||
// scripts
|
||||
|
@@ -10,8 +10,6 @@ package org.dspace.storage.rdbms.xmlworkflow;
|
||||
|
||||
import org.dspace.storage.rdbms.DatabaseUtils;
|
||||
import org.dspace.storage.rdbms.migration.MigrationUtils;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.service.XmlWorkflowService;
|
||||
import org.flywaydb.core.api.migration.BaseJavaMigration;
|
||||
import org.flywaydb.core.api.migration.Context;
|
||||
|
||||
@@ -39,51 +37,47 @@ public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration extends BaseJ
|
||||
|
||||
@Override
|
||||
public void migrate(Context context) throws Exception {
|
||||
// Make sure XML Workflow is enabled, shouldn't even be needed since this class is only loaded if the service
|
||||
// is enabled.
|
||||
if (WorkflowServiceFactory.getInstance().getWorkflowService() instanceof XmlWorkflowService) {
|
||||
// Now, check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
// If XMLWorkflow Table does NOT exist in this database, then lets do the migration!
|
||||
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql
|
||||
// scripts
|
||||
if (!DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) {
|
||||
String dbtype = context.getConnection().getMetaData().getDatabaseProductName();
|
||||
String dbFileLocation = null;
|
||||
if (dbtype.toLowerCase().contains("postgres")) {
|
||||
dbFileLocation = "postgres";
|
||||
} else if (dbtype.toLowerCase().contains("oracle")) {
|
||||
dbFileLocation = "oracle";
|
||||
} else if (dbtype.toLowerCase().contains("h2")) {
|
||||
dbFileLocation = "h2";
|
||||
}
|
||||
|
||||
|
||||
// Determine path of this migration class (as the SQL scripts
|
||||
// we will run are based on this path under /src/main/resources)
|
||||
String packagePath = V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.class.getPackage().getName()
|
||||
.replace(".", "/");
|
||||
|
||||
// Get the contents of our DB Schema migration script, based on path & DB type
|
||||
// (e.g. /src/main/resources/[path-to-this-class]/postgres/xml_workflow_migration.sql)
|
||||
String dbMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation +
|
||||
"/v6.0__DS-2701_xml_workflow_migration.sql");
|
||||
|
||||
// Actually execute the Database schema migration SQL
|
||||
// This will create the necessary tables for the XMLWorkflow feature
|
||||
DatabaseUtils.executeSql(context.getConnection(), dbMigrateSQL);
|
||||
|
||||
// Get the contents of our data migration script, based on path & DB type
|
||||
// (e.g. /src/main/resources/[path-to-this-class]/postgres/data_workflow_migration.sql)
|
||||
String dataMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation +
|
||||
"/v6.0__DS-2701_data_workflow_migration.sql");
|
||||
|
||||
// Actually execute the Data migration SQL
|
||||
// This will migrate all existing traditional workflows to the new XMLWorkflow system & tables
|
||||
DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL);
|
||||
|
||||
// Assuming both succeeded, save the size of the scripts for getChecksum() below
|
||||
migration_file_size = dbMigrateSQL.length() + dataMigrateSQL.length();
|
||||
// Check if the XMLWorkflow table (cwf_workflowitem) already exists in this database
|
||||
// If XMLWorkflow Table does NOT exist in this database, then lets do the migration!
|
||||
// If XMLWorkflow Table ALREADY exists, then this migration is a noop, we assume you manually ran the sql
|
||||
// scripts
|
||||
if (!DatabaseUtils.tableExists(context.getConnection(), "cwf_workflowitem")) {
|
||||
String dbtype = context.getConnection().getMetaData().getDatabaseProductName();
|
||||
String dbFileLocation = null;
|
||||
if (dbtype.toLowerCase().contains("postgres")) {
|
||||
dbFileLocation = "postgres";
|
||||
} else if (dbtype.toLowerCase().contains("oracle")) {
|
||||
dbFileLocation = "oracle";
|
||||
} else if (dbtype.toLowerCase().contains("h2")) {
|
||||
dbFileLocation = "h2";
|
||||
}
|
||||
|
||||
|
||||
// Determine path of this migration class (as the SQL scripts
|
||||
// we will run are based on this path under /src/main/resources)
|
||||
String packagePath = V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.class.getPackage().getName()
|
||||
.replace(".", "/");
|
||||
|
||||
// Get the contents of our DB Schema migration script, based on path & DB type
|
||||
// (e.g. /src/main/resources/[path-to-this-class]/postgres/xml_workflow_migration.sql)
|
||||
String dbMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation +
|
||||
"/v6.0__DS-2701_xml_workflow_migration.sql");
|
||||
|
||||
// Actually execute the Database schema migration SQL
|
||||
// This will create the necessary tables for the XMLWorkflow feature
|
||||
DatabaseUtils.executeSql(context.getConnection(), dbMigrateSQL);
|
||||
|
||||
// Get the contents of our data migration script, based on path & DB type
|
||||
// (e.g. /src/main/resources/[path-to-this-class]/postgres/data_workflow_migration.sql)
|
||||
String dataMigrateSQL = MigrationUtils.getResourceAsString(packagePath + "/" + dbFileLocation +
|
||||
"/v6.0__DS-2701_data_workflow_migration.sql");
|
||||
|
||||
// Actually execute the Data migration SQL
|
||||
// This will migrate all existing traditional workflows to the new XMLWorkflow system & tables
|
||||
DatabaseUtils.executeSql(context.getConnection(), dataMigrateSQL);
|
||||
|
||||
// Assuming both succeeded, save the size of the scripts for getChecksum() below
|
||||
migration_file_size = dbMigrateSQL.length() + dataMigrateSQL.length();
|
||||
}
|
||||
}
|
||||
|
||||
|
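Once the XmlWorkflowService guard is dropped, the three Java migrations above all follow essentially the same pattern: determine the database type, build the SQL script path from the migration class's package (the scripts sit beside the class under src/main/resources), read the script with MigrationUtils.getResourceAsString(), execute it with DatabaseUtils.executeSql(), and keep the script length for getChecksum(). A stripped-down sketch of that pattern, using a placeholder class and script name rather than a real DSpace migration:

import java.sql.Connection;

import org.dspace.storage.rdbms.DatabaseUtils;
import org.dspace.storage.rdbms.migration.MigrationUtils;
import org.flywaydb.core.api.migration.BaseJavaMigration;
import org.flywaydb.core.api.migration.Context;

public class V0_0_0000_00_00__Example_Sql_From_Classpath extends BaseJavaMigration {

    private int migrationFileSize = -1;

    @Override
    public void migrate(Context context) throws Exception {
        Connection connection = context.getConnection();

        // e.g. "postgres", "oracle" or "h2", matching the per-database folders on the classpath
        String dbType = DatabaseUtils.getDbType(connection);

        // SQL scripts live next to this class, in a subfolder named after the database type
        String packagePath = V0_0_0000_00_00__Example_Sql_From_Classpath.class
                .getPackage().getName().replace(".", "/");
        String sql = MigrationUtils.getResourceAsString(
                packagePath + "/" + dbType + "/example_migration.sql");

        // Run the script over the connection Flyway provides
        DatabaseUtils.executeSql(connection, sql);

        // Remember the script size so getChecksum() reports a stable value
        migrationFileSize = sql.length();
    }

    @Override
    public Integer getChecksum() {
        return migrationFileSize;
    }
}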
@@ -7,6 +7,21 @@
|
||||
*/
|
||||
package org.dspace.submit.model;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.text.ParseException;
|
||||
import java.util.Date;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.authorize.ResourcePolicy;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.eperson.service.GroupService;
|
||||
import org.dspace.util.DateMathParser;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* This class represents an option available in the submission upload section to
|
||||
* set permission on a file. An option is defined by a name such as "open
|
||||
@@ -18,6 +33,15 @@ package org.dspace.submit.model;
|
||||
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
|
||||
*/
|
||||
public class AccessConditionOption {
|
||||
|
||||
@Autowired
|
||||
AuthorizeService authorizeService;
|
||||
|
||||
@Autowired
|
||||
GroupService groupService;
|
||||
|
||||
DateMathParser dateMathParser = new DateMathParser();
|
||||
|
||||
/** A unique name identifying the access condition option **/
|
||||
private String name;
|
||||
|
||||
@@ -27,16 +51,6 @@ public class AccessConditionOption {
|
||||
*/
|
||||
private String groupName;
|
||||
|
||||
/**
|
||||
* this is in alternative to the {@link #groupName}. The sub-groups listed in
|
||||
* the DSpace group identified by the name here specified will be available to
|
||||
* the user to personalize the access condition. They can be for instance
|
||||
* University Staff, University Students, etc. so that a "restricted access"
|
||||
* option can be further specified without the need to create separate access
|
||||
* condition options for each group
|
||||
*/
|
||||
private String selectGroupName;
|
||||
|
||||
/**
|
||||
* set to <code>true</code> if this option requires a start date to be indicated
|
||||
* when creating the underlying resource policy
|
||||
@@ -95,6 +109,10 @@ public class AccessConditionOption {
|
||||
this.hasEndDate = hasEndDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Explanation see: {@link #startDateLimit}
|
||||
* @return startDateLimit
|
||||
*/
|
||||
public String getStartDateLimit() {
|
||||
return startDateLimit;
|
||||
}
|
||||
@@ -103,6 +121,10 @@ public class AccessConditionOption {
|
||||
this.startDateLimit = startDateLimit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Explanation see: {@link #endDateLimit}
|
||||
* @return endDateLimit
|
||||
*/
|
||||
public String getEndDateLimit() {
|
||||
return endDateLimit;
|
||||
}
|
||||
@@ -111,11 +133,62 @@ public class AccessConditionOption {
|
||||
this.endDateLimit = endDateLimit;
|
||||
}
|
||||
|
||||
public String getSelectGroupName() {
|
||||
return selectGroupName;
|
||||
}
|
||||
/**
|
||||
* Create a new resource policy for a bitstream
|
||||
* @param context DSpace context
|
||||
* @param b bitstream for which resource policy is created
|
||||
* @param name name of the resource policy
|
||||
* @param description description of the resource policy
|
||||
* @param startDate start date of the resource policy. If {@link #getHasStartDate()} returns false,
|
||||
* startDate should be null. Otherwise startDate may not be null.
|
||||
* @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false,
|
||||
* endDate should be null. Otherwise endDate may not be null.
|
||||
*/
|
||||
public void createResourcePolicy(Context context, Bitstream b, String name, String description,
|
||||
Date startDate, Date endDate)
|
||||
throws SQLException, AuthorizeException, ParseException {
|
||||
if (getHasStartDate() && startDate == null) {
|
||||
throw new IllegalStateException("The access condition " + getName() + " requires a start date.");
|
||||
}
|
||||
if (getHasEndDate() && endDate == null) {
|
||||
throw new IllegalStateException("The access condition " + getName() + " requires an end date.");
|
||||
}
|
||||
if (!getHasStartDate() && startDate != null) {
|
||||
throw new IllegalStateException("The access condition " + getName() + " cannot contain a start date.");
|
||||
}
|
||||
if (!getHasEndDate() && endDate != null) {
|
||||
throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date.");
|
||||
}
|
||||
|
||||
public void setSelectGroupName(String selectGroupName) {
|
||||
this.selectGroupName = selectGroupName;
|
||||
Date latestStartDate = null;
|
||||
if (getStartDateLimit() != null) {
|
||||
latestStartDate = dateMathParser.parseMath(getStartDateLimit());
|
||||
}
|
||||
|
||||
Date latestEndDate = null;
|
||||
if (getEndDateLimit() != null) {
|
||||
latestEndDate = dateMathParser.parseMath(getEndDateLimit());
|
||||
}
|
||||
|
||||
// throw if startDate after latestStartDate
|
||||
if (startDate != null && latestStartDate != null && startDate.after(latestStartDate)) {
|
||||
throw new IllegalStateException(String.format(
|
||||
"The start date of access condition %s should be earlier than %s from now.",
|
||||
getName(), getStartDateLimit()
|
||||
));
|
||||
}
|
||||
|
||||
// throw if endDate after latestEndDate
|
||||
if (endDate != null && latestEndDate != null && endDate.after(latestEndDate)) {
|
||||
throw new IllegalStateException(String.format(
|
||||
"The end date of access condition %s should be earlier than %s from now.",
|
||||
getName(), getEndDateLimit()
|
||||
));
|
||||
}
|
||||
|
||||
Group group = groupService.findByName(context, getGroupName());
|
||||
authorizeService.createResourcePolicy(context, b, group, null, Constants.READ,
|
||||
ResourcePolicy.TYPE_CUSTOM, name, description, startDate,
|
||||
endDate);
|
||||
}
|
||||
}
|
||||
|
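The new createResourcePolicy() method shown above validates the supplied dates against the option's configuration (getHasStartDate()/getHasEndDate() plus the DateMathParser-based startDateLimit/endDateLimit) before creating a TYPE_CUSTOM READ policy for the configured group. A minimal usage sketch follows; the "+3MONTHS" end date and the policy description are illustrative values, and in DSpace the AccessConditionOption instance itself normally comes from Spring configuration rather than being built by hand.

import java.util.Date;

import org.dspace.content.Bitstream;
import org.dspace.core.Context;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.util.DateMathParser;

public class AccessConditionOptionSketch {

    /**
     * Apply a pre-configured access condition option to a bitstream. If the option requires
     * an end date, one is derived with Solr-style date math; createResourcePolicy() throws
     * IllegalStateException when a required date is missing or falls outside the configured
     * limit.
     */
    public static void applyOption(Context context, Bitstream bitstream, AccessConditionOption option)
        throws Exception {
        Date startDate = option.getHasStartDate() ? new Date() : null;

        Date endDate = null;
        if (option.getHasEndDate()) {
            // Illustrative value; it must stay within the option's endDateLimit (e.g. "+6MONTHS")
            endDate = new DateMathParser().parseMath("+3MONTHS");
        }

        option.createResourcePolicy(context, bitstream, option.getName(),
                                    "Policy created from option " + option.getName(),
                                    startDate, endDate);
    }
}

The startDateLimit/endDateLimit values use the same date-math syntax and are resolved relative to the current time when the policy is created.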
@@ -5,7 +5,7 @@
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.rest.utils;
|
||||
package org.dspace.util;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.time.Instant;
|
@@ -15,5 +15,5 @@ import org.dspace.content.InProgressSubmission;
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public interface WorkflowItem extends InProgressSubmission {
|
||||
public int getState();
|
||||
|
||||
}
|
||||
|
@@ -1,185 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.FetchType;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.OneToOne;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
|
||||
/**
|
||||
* Class representing an item going through the workflow process in DSpace
|
||||
*
|
||||
* @author Robert Tansley
|
||||
* @version $Revision$
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "workflowitem")
|
||||
public class BasicWorkflowItem implements WorkflowItem {
|
||||
|
||||
@Id
|
||||
@Column(name = "workflow_id", unique = true, nullable = false)
|
||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "workflowitem_seq")
|
||||
@SequenceGenerator(name = "workflowitem_seq", sequenceName = "workflowitem_seq", allocationSize = 1)
|
||||
private Integer workflowitemId;
|
||||
|
||||
|
||||
/**
|
||||
* The item this workflow object pertains to
|
||||
*/
|
||||
@OneToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "item_id", unique = true)
|
||||
private Item item;
|
||||
|
||||
/**
|
||||
* The collection the item is being submitted to
|
||||
*/
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "collection_id")
|
||||
private Collection collection;
|
||||
|
||||
/**
|
||||
* EPerson owning the current state
|
||||
*/
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "owner")
|
||||
private EPerson owner;
|
||||
|
||||
@Column(name = "state")
|
||||
private int state;
|
||||
|
||||
@Column(name = "multiple_titles")
|
||||
private boolean multipleTitles = false;
|
||||
|
||||
@Column(name = "published_before")
|
||||
private boolean publishedBefore = false;
|
||||
|
||||
@Column(name = "multiple_files")
|
||||
private boolean multipleFiles = false;
|
||||
|
||||
/**
|
||||
* Protected constructor, create object using:
|
||||
* {@link org.dspace.workflowbasic.service.BasicWorkflowItemService#create(Context, Item, Collection)}
|
||||
*/
|
||||
protected BasicWorkflowItem() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the internal ID of this workflow item
|
||||
*
|
||||
* @return the internal identifier
|
||||
*/
|
||||
@Override
|
||||
public Integer getID() {
|
||||
return workflowitemId;
|
||||
}
|
||||
|
||||
/**
|
||||
* get owner of WorkflowItem
|
||||
*
|
||||
* @return EPerson owner
|
||||
*/
|
||||
public EPerson getOwner() {
|
||||
return owner;
|
||||
}
|
||||
|
||||
/**
|
||||
* set owner of WorkflowItem
|
||||
*
|
||||
* @param ep owner
|
||||
*/
|
||||
public void setOwner(EPerson ep) {
|
||||
this.owner = ep;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get state of WorkflowItem
|
||||
*
|
||||
* @return state
|
||||
*/
|
||||
public int getState() {
|
||||
return state;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set state of WorkflowItem
|
||||
*
|
||||
* @param newstate new state (from <code>WorkflowManager</code>)
|
||||
*/
|
||||
public void setState(int newstate) {
|
||||
this.state = newstate;
|
||||
}
|
||||
|
||||
// InProgressSubmission methods
|
||||
@Override
|
||||
public Item getItem() {
|
||||
return item;
|
||||
}
|
||||
|
||||
void setItem(Item item) {
|
||||
this.item = item;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection getCollection() {
|
||||
return collection;
|
||||
}
|
||||
|
||||
public void setCollection(Collection collection) {
|
||||
this.collection = collection;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EPerson getSubmitter() {
|
||||
return item.getSubmitter();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasMultipleFiles() {
|
||||
return multipleFiles;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMultipleFiles(boolean b) {
|
||||
this.multipleFiles = b;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasMultipleTitles() {
|
||||
return multipleTitles;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMultipleTitles(boolean b) {
|
||||
this.multipleTitles = b;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isPublishedBefore() {
|
||||
return publishedBefore;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPublishedBefore(boolean b) {
|
||||
this.publishedBefore = b;
|
||||
}
|
||||
}
|
@@ -1,171 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.LogManager;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.dao.BasicWorkflowItemDAO;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.workflowbasic.service.TaskListItemService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Service implementation for the BasicWorkflowItem object.
|
||||
* This class is responsible for all business logic calls for the BasicWorkflowItem object and is autowired by spring.
|
||||
* This class should never be accessed directly.
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class BasicWorkflowItemServiceImpl implements BasicWorkflowItemService {
|
||||
|
||||
/**
|
||||
* log4j category
|
||||
*/
|
||||
protected static Logger log = org.apache.logging.log4j.LogManager.getLogger(BasicWorkflowItem.class);
|
||||
|
||||
@Autowired(required = true)
|
||||
protected BasicWorkflowItemDAO workflowItemDAO;
|
||||
|
||||
@Autowired(required = true)
|
||||
protected ItemService itemService;
|
||||
@Autowired(required = true)
|
||||
protected TaskListItemService taskListItemService;
|
||||
|
||||
|
||||
protected BasicWorkflowItemServiceImpl() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public BasicWorkflowItem create(Context context, Item item, Collection collection)
|
||||
throws SQLException, AuthorizeException {
|
||||
if (findByItem(context, item) != null) {
|
||||
throw new IllegalArgumentException(
|
||||
"Unable to create a workflow item for an item that already has a workflow item.");
|
||||
}
|
||||
BasicWorkflowItem workflowItem = workflowItemDAO.create(context, new BasicWorkflowItem());
|
||||
workflowItem.setItem(item);
|
||||
workflowItem.setCollection(collection);
|
||||
update(context, workflowItem);
|
||||
return workflowItem;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BasicWorkflowItem find(Context context, int id) throws SQLException {
|
||||
BasicWorkflowItem workflowItem = workflowItemDAO.findByID(context, BasicWorkflowItem.class, id);
|
||||
|
||||
if (workflowItem == null) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(LogManager.getHeader(context, "find_workflow_item",
|
||||
"not_found,workflow_id=" + id));
|
||||
}
|
||||
} else {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug(LogManager.getHeader(context, "find_workflow_item",
|
||||
"workflow_id=" + id));
|
||||
}
|
||||
}
|
||||
return workflowItem;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findAll(Context context) throws SQLException {
|
||||
return workflowItemDAO.findAll(context, BasicWorkflowItem.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findBySubmitter(Context context, EPerson ep) throws SQLException {
|
||||
return workflowItemDAO.findBySubmitter(context, ep);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByCollection(Context context, Collection collection)
|
||||
throws SQLException, IOException, AuthorizeException {
|
||||
List<BasicWorkflowItem> workflowItems = findByCollection(context, collection);
|
||||
Iterator<BasicWorkflowItem> iterator = workflowItems.iterator();
|
||||
while (iterator.hasNext()) {
|
||||
BasicWorkflowItem workflowItem = iterator.next();
|
||||
iterator.remove();
|
||||
delete(context, workflowItem);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(Context context, BasicWorkflowItem workflowItem)
|
||||
throws SQLException, AuthorizeException, IOException {
|
||||
Item item = workflowItem.getItem();
|
||||
deleteWrapper(context, workflowItem);
|
||||
itemService.delete(context, item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findByCollection(Context context, Collection collection) throws SQLException {
|
||||
return workflowItemDAO.findByCollection(context, collection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BasicWorkflowItem findByItem(Context context, Item item) throws SQLException {
|
||||
return workflowItemDAO.findByItem(context, item);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteWrapper(Context context, BasicWorkflowItem workflowItem) throws SQLException, AuthorizeException {
|
||||
// delete any pending tasks
|
||||
taskListItemService.deleteByWorkflowItem(context, workflowItem);
|
||||
|
||||
// FIXME - auth?
|
||||
workflowItemDAO.delete(context, workflowItem);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Context context, BasicWorkflowItem workflowItem) throws SQLException, AuthorizeException {
|
||||
// FIXME check auth
|
||||
log.info(LogManager.getHeader(context, "update_workflow_item",
|
||||
"workflow_item_id=" + workflowItem.getID()));
|
||||
|
||||
|
||||
// Update the item
|
||||
itemService.update(context, workflowItem.getItem());
|
||||
|
||||
// Update ourselves
|
||||
workflowItemDAO.save(context, workflowItem);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findPooledTasks(Context context, EPerson ePerson) throws SQLException {
|
||||
return workflowItemDAO.findByPooledTasks(context, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findByOwner(Context context, EPerson ePerson) throws SQLException {
|
||||
return workflowItemDAO.findByOwner(context, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countTotal(Context context) throws SQLException {
|
||||
return workflowItemDAO.countRows(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void move(Context context, BasicWorkflowItem inProgressSubmission, Collection fromCollection,
|
||||
Collection toCollection) {
|
||||
// TODO not implemented yet
|
||||
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
@@ -1,79 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.FetchType;
|
||||
import javax.persistence.GeneratedValue;
|
||||
import javax.persistence.GenerationType;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.JoinColumn;
|
||||
import javax.persistence.ManyToOne;
|
||||
import javax.persistence.SequenceGenerator;
|
||||
import javax.persistence.Table;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.ReloadableEntity;
|
||||
import org.dspace.eperson.EPerson;
|
||||
|
||||
/**
|
||||
* Database entity representation of the TaskListItem table
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "tasklistitem")
|
||||
public class TaskListItem implements ReloadableEntity<Integer> {
|
||||
|
||||
@Id
|
||||
@Column(name = "tasklist_id", unique = true, nullable = false)
|
||||
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "tasklistitem_seq")
|
||||
@SequenceGenerator(name = "tasklistitem_seq", sequenceName = "tasklistitem_seq", allocationSize = 1)
|
||||
private int taskListItemId;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "eperson_id")
|
||||
private EPerson ePerson;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "workflow_id")
|
||||
private BasicWorkflowItem workflowItem;
|
||||
|
||||
/**
|
||||
* Protected constructor, create object using:
|
||||
* {@link org.dspace.workflowbasic.service.TaskListItemService#create(Context, BasicWorkflowItem, EPerson)}
|
||||
*/
|
||||
protected TaskListItem() {
|
||||
|
||||
}
|
||||
|
||||
public int getTaskListItemId() {
|
||||
return taskListItemId;
|
||||
}
|
||||
|
||||
public EPerson getEPerson() {
|
||||
return ePerson;
|
||||
}
|
||||
|
||||
public BasicWorkflowItem getWorkflowItem() {
|
||||
return workflowItem;
|
||||
}
|
||||
|
||||
void setEPerson(EPerson ePerson) {
|
||||
this.ePerson = ePerson;
|
||||
}
|
||||
|
||||
void setWorkflowItem(BasicWorkflowItem workflowItem) {
|
||||
this.workflowItem = workflowItem;
|
||||
}
|
||||
|
||||
public Integer getID() {
|
||||
return taskListItemId;
|
||||
}
|
||||
}
|
@@ -1,69 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.dao.TaskListItemDAO;
|
||||
import org.dspace.workflowbasic.service.TaskListItemService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Service implementation for the TaskListItem object.
|
||||
* This class is responsible for all business logic calls for the TaskListItem object and is autowired by spring.
|
||||
* This class should never be accessed directly.
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class TaskListItemServiceImpl implements TaskListItemService {
|
||||
|
||||
@Autowired(required = true)
|
||||
protected TaskListItemDAO taskListItemDAO;
|
||||
|
||||
protected TaskListItemServiceImpl() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public TaskListItem create(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) throws SQLException {
|
||||
TaskListItem taskListItem = taskListItemDAO.create(context, new TaskListItem());
|
||||
taskListItem.setWorkflowItem(workflowItem);
|
||||
taskListItem.setEPerson(ePerson);
|
||||
update(context, taskListItem);
|
||||
return taskListItem;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException {
|
||||
taskListItemDAO.deleteByWorkflowItem(context, workflowItem);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson)
|
||||
throws SQLException {
|
||||
taskListItemDAO.deleteByWorkflowItemAndEPerson(context, workflowItem, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
taskListItemDAO.deleteByEPerson(context, ePerson);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(Context context, TaskListItem taskListItem) throws SQLException {
|
||||
taskListItemDAO.save(context, taskListItem);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<TaskListItem> findByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
return taskListItemDAO.findByEPerson(context, ePerson);
|
||||
}
|
||||
}
|
@@ -1,41 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.dao;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.GenericDAO;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
|
||||
/**
|
||||
* Database Access Object interface class for the BasicWorkflowItem object.
|
||||
* The implementation of this class is responsible for all database calls for the BasicWorkflowItem object and is
|
||||
* autowired by spring
|
||||
* This class should only be accessed from a single service and should never be exposed outside of the API
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public interface BasicWorkflowItemDAO extends GenericDAO<BasicWorkflowItem> {
|
||||
|
||||
public BasicWorkflowItem findByItem(Context context, Item i) throws SQLException;
|
||||
|
||||
public List<BasicWorkflowItem> findBySubmitter(Context context, EPerson ep) throws SQLException;
|
||||
|
||||
public List<BasicWorkflowItem> findByCollection(Context context, Collection c) throws SQLException;
|
||||
|
||||
public List<BasicWorkflowItem> findByPooledTasks(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
public List<BasicWorkflowItem> findByOwner(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
int countRows(Context context) throws SQLException;
|
||||
}
|
@@ -1,37 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.dao;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.GenericDAO;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.TaskListItem;
|
||||
|
||||
/**
|
||||
* Database Access Object interface class for the TaskListItem object.
|
||||
* The implementation of this class is responsible for all database calls for the TaskListItem object and is
|
||||
* autowired by spring
|
||||
* This class should only be accessed from a single service and should never be exposed outside of the API
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public interface TaskListItemDAO extends GenericDAO<TaskListItem> {
|
||||
|
||||
public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException;
|
||||
|
||||
public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson)
|
||||
throws SQLException;
|
||||
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
public List<TaskListItem> findByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
}
|
@@ -1,105 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Join;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.Item_;
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem_;
|
||||
import org.dspace.workflowbasic.dao.BasicWorkflowItemDAO;
|
||||
|
||||
/**
|
||||
* Hibernate implementation of the Database Access Object interface class for the BasicWorkflowItem object.
|
||||
* This class is responsible for all database calls for the BasicWorkflowItem object and is autowired by spring
|
||||
* This class should never be accessed directly.
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class BasicWorkflowItemDAOImpl extends AbstractHibernateDAO<BasicWorkflowItem> implements BasicWorkflowItemDAO {
|
||||
protected BasicWorkflowItemDAOImpl() {
|
||||
super();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public BasicWorkflowItem findByItem(Context context, Item i) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, BasicWorkflowItem.class);
|
||||
Root<BasicWorkflowItem> basicWorkflowItemRoot = criteriaQuery.from(BasicWorkflowItem.class);
|
||||
criteriaQuery.select(basicWorkflowItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(basicWorkflowItemRoot.get(BasicWorkflowItem_.item), i));
|
||||
return uniqueResult(context, criteriaQuery, false, BasicWorkflowItem.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findBySubmitter(Context context, EPerson ep) throws SQLException {
|
||||
|
||||
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, BasicWorkflowItem.class);
|
||||
Root<BasicWorkflowItem> basicWorkflowItemRoot = criteriaQuery.from(BasicWorkflowItem.class);
|
||||
Join<BasicWorkflowItem, Item> join = basicWorkflowItemRoot.join("item");
|
||||
criteriaQuery.select(basicWorkflowItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(join.get(Item_.submitter), ep));
|
||||
|
||||
List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
|
||||
orderList.add(criteriaBuilder.asc(basicWorkflowItemRoot.get(BasicWorkflowItem_.workflowitemId)));
|
||||
criteriaQuery.orderBy(orderList);
|
||||
|
||||
|
||||
return list(context, criteriaQuery, false, BasicWorkflowItem.class, -1, -1);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findByCollection(Context context, Collection c) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, BasicWorkflowItem.class);
|
||||
Root<BasicWorkflowItem> basicWorkflowItemRoot = criteriaQuery.from(BasicWorkflowItem.class);
|
||||
criteriaQuery.select(basicWorkflowItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(basicWorkflowItemRoot.get(BasicWorkflowItem_.collection), c));
|
||||
return list(context, criteriaQuery, false, BasicWorkflowItem.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findByPooledTasks(Context context, EPerson ePerson) throws SQLException {
|
||||
String queryString = "select wf from TaskListItem as tli join tli.workflowItem wf where tli.ePerson = " +
|
||||
":eperson ORDER BY wf.workflowitemId";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("eperson", ePerson);
|
||||
return list(query);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BasicWorkflowItem> findByOwner(Context context, EPerson ePerson) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, BasicWorkflowItem.class);
|
||||
Root<BasicWorkflowItem> basicWorkflowItemRoot = criteriaQuery.from(BasicWorkflowItem.class);
|
||||
criteriaQuery.select(basicWorkflowItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(basicWorkflowItemRoot.get(BasicWorkflowItem_.owner), ePerson));
|
||||
return list(context, criteriaQuery, false, BasicWorkflowItem.class, -1, -1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countRows(Context context) throws SQLException {
|
||||
return count(createQuery(context, "SELECT count(*) FROM BasicWorkflowItem"));
|
||||
}
|
||||
}
|
@@ -1,72 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.dao.impl;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.criteria.CriteriaBuilder;
|
||||
import javax.persistence.criteria.CriteriaQuery;
|
||||
import javax.persistence.criteria.Root;
|
||||
|
||||
import org.dspace.core.AbstractHibernateDAO;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.TaskListItem;
|
||||
import org.dspace.workflowbasic.TaskListItem_;
|
||||
import org.dspace.workflowbasic.dao.TaskListItemDAO;
|
||||
|
||||
/**
|
||||
* Hibernate implementation of the Database Access Object interface class for the TaskListItem object.
|
||||
* This class is responsible for all database calls for the TaskListItem object and is autowired by spring
|
||||
* This class should never be accessed directly.
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class TaskListItemDAOImpl extends AbstractHibernateDAO<TaskListItem> implements TaskListItemDAO {
|
||||
protected TaskListItemDAOImpl() {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException {
|
||||
String queryString = "delete from TaskListItem where workflowItem = :workflowItem";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("workflowItem", workflowItem);
|
||||
query.executeUpdate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson)
|
||||
throws SQLException {
|
||||
String queryString = "delete from TaskListItem where workflowItem = :workflowItem AND ePerson = :ePerson";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("workflowItem", workflowItem);
|
||||
query.setParameter("ePerson", ePerson);
|
||||
query.executeUpdate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
String queryString = "delete from TaskListItem where ePerson = :ePerson";
|
||||
Query query = createQuery(context, queryString);
|
||||
query.setParameter("ePerson", ePerson);
|
||||
query.executeUpdate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<TaskListItem> findByEPerson(Context context, EPerson ePerson) throws SQLException {
|
||||
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
|
||||
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, TaskListItem.class);
|
||||
Root<TaskListItem> taskListItemRoot = criteriaQuery.from(TaskListItem.class);
|
||||
criteriaQuery.select(taskListItemRoot);
|
||||
criteriaQuery.where(criteriaBuilder.equal(taskListItemRoot.get(TaskListItem_.ePerson), ePerson));
|
||||
return list(context, criteriaQuery, false, TaskListItem.class, -1, -1);
|
||||
}
|
||||
}
|
@@ -1,34 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.factory;
|
||||
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.dspace.workflow.factory.WorkflowServiceFactory;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowService;
|
||||
import org.dspace.workflowbasic.service.TaskListItemService;
|
||||
|
||||
/**
|
||||
* Abstract factory to get services for the workflowbasic package, use BasicWorkflowServiceFactory.getInstance() to
|
||||
* retrieve an implementation
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public abstract class BasicWorkflowServiceFactory extends WorkflowServiceFactory {
|
||||
|
||||
public abstract BasicWorkflowService getBasicWorkflowService();
|
||||
|
||||
public abstract BasicWorkflowItemService getBasicWorkflowItemService();
|
||||
|
||||
public abstract TaskListItemService getTaskListItemService();
|
||||
|
||||
public static BasicWorkflowServiceFactory getInstance() {
|
||||
return DSpaceServicesFactory.getInstance().getServiceManager()
|
||||
.getServiceByName("workflowServiceFactory", BasicWorkflowServiceFactory.class);
|
||||
}
|
||||
}
|
@@ -1,57 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.factory;
|
||||
|
||||
import org.dspace.workflow.WorkflowItemService;
|
||||
import org.dspace.workflow.WorkflowService;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowItemService;
|
||||
import org.dspace.workflowbasic.service.BasicWorkflowService;
|
||||
import org.dspace.workflowbasic.service.TaskListItemService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* Factory implementation to get services for the workflowbasic package, use BasicWorkflowServiceFactory.getInstance
|
||||
* () to retrieve an implementation
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public class BasicWorkflowServiceFactoryImpl extends BasicWorkflowServiceFactory {
|
||||
|
||||
@Autowired(required = true)
|
||||
private BasicWorkflowService basicWorkflowService;
|
||||
@Autowired(required = true)
|
||||
private BasicWorkflowItemService basicWorkflowItemService;
|
||||
@Autowired(required = true)
|
||||
private TaskListItemService taskListItemService;
|
||||
|
||||
|
||||
@Override
|
||||
public BasicWorkflowService getBasicWorkflowService() {
|
||||
return basicWorkflowService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BasicWorkflowItemService getBasicWorkflowItemService() {
|
||||
return basicWorkflowItemService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TaskListItemService getTaskListItemService() {
|
||||
return taskListItemService;
|
||||
}
|
||||
|
||||
@Override
|
||||
public WorkflowService getWorkflowService() {
|
||||
return getBasicWorkflowService();
|
||||
}
|
||||
|
||||
@Override
|
||||
public WorkflowItemService getWorkflowItemService() {
|
||||
return getBasicWorkflowItemService();
|
||||
}
|
||||
}
|
@@ -1,40 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.service;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflow.WorkflowItemService;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
|
||||
/**
|
||||
* Service interface class for the BasicWorkflowItem object.
|
||||
* The implementation of this class is responsible for all business logic calls for the BasicWorkflowItem object and
|
||||
* is autowired by spring
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public interface BasicWorkflowItemService extends WorkflowItemService<BasicWorkflowItem> {
|
||||
|
||||
public List<BasicWorkflowItem> findPooledTasks(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
/**
|
||||
* Retrieve the list of BasicWorkflowItems that the given EPerson is owner of (owner == claimed for review)
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param ePerson The DSpace EPerson object.
|
||||
* @return a list of BasicWorkflowItem objects
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public List<BasicWorkflowItem> findByOwner(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
int countTotal(Context context) throws SQLException;
|
||||
}
|
@@ -1,206 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflow.WorkflowService;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
|
||||
/**
|
||||
* Workflow state machine
|
||||
*
|
||||
* Notes:
|
||||
*
|
||||
* Determining item status from the database:
|
||||
*
|
||||
* When an item has not been submitted yet, it is in the user's personal
|
||||
* workspace (there is a row in PersonalWorkspace pointing to it.)
|
||||
*
|
||||
* When an item is submitted and is somewhere in a workflow, it has a row in the
|
||||
* WorkflowItem table pointing to it. The state of the workflow can be
|
||||
* determined by looking at WorkflowItem.getState()
|
||||
*
|
||||
* When a submission is complete, the WorkflowItem pointing to the item is
|
||||
* destroyed and the archive() method is called, which hooks the item up to the
|
||||
* archive.
|
||||
*
|
||||
* Notification: When an item enters a state that requires notification,
|
||||
* (WFSTATE_STEP1POOL, WFSTATE_STEP2POOL, WFSTATE_STEP3POOL,) the workflow needs
|
||||
* to notify the appropriate groups that they have a pending task to claim.
|
||||
*
|
||||
* Revealing lists of approvers, editors, and reviewers. A method could be added
|
||||
* to do this, but it isn't strictly necessary. (say public List
|
||||
* getStateEPeople( WorkflowItem wi, int state ) could return people affected by
|
||||
* the item's current state.
|
||||
*/
|
||||
public interface BasicWorkflowService extends WorkflowService<BasicWorkflowItem> {
|
||||
|
||||
// states to store in WorkflowItem for the GUI to report on
|
||||
// fits our current set of workflow states (stored in WorkflowItem.state)
|
||||
public static final int WFSTATE_SUBMIT = 0; // hmm, probably don't need
|
||||
|
||||
public static final int WFSTATE_STEP1POOL = 1; // waiting for a reviewer to
|
||||
// claim it
|
||||
|
||||
public static final int WFSTATE_STEP1 = 2; // task - reviewer has claimed it
|
||||
|
||||
public static final int WFSTATE_STEP2POOL = 3; // waiting for an admin to
|
||||
// claim it
|
||||
|
||||
public static final int WFSTATE_STEP2 = 4; // task - admin has claimed item
|
||||
|
||||
public static final int WFSTATE_STEP3POOL = 5; // waiting for an editor to
|
||||
// claim it
|
||||
|
||||
public static final int WFSTATE_STEP3 = 6; // task - editor has claimed the
|
||||
// item
|
||||
|
||||
public static final int WFSTATE_ARCHIVE = 7; // probably don't need this one
|
||||
// either
|
||||
|
||||
/**
|
||||
* Translate symbolic name of workflow state into number.
|
||||
* The name is case-insensitive. Returns -1 when name cannot
|
||||
* be matched.
|
||||
*
|
||||
* @param state symbolic name of workflow state, must be one of
|
||||
* the elements of workflowText array.
|
||||
* @return numeric workflow state or -1 for error.
|
||||
*/
|
||||
public int getWorkflowID(String state);
|
||||
|
||||
/**
|
||||
* getOwnedTasks() returns a List of WorkflowItems containing the tasks
|
||||
* claimed and owned by an EPerson. The GUI displays this info on the
|
||||
* MyDSpace page.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param e The EPerson we want to fetch owned tasks for.
|
||||
* @return list of basic workflow items
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public List<BasicWorkflowItem> getOwnedTasks(Context context, EPerson e)
|
||||
throws java.sql.SQLException;
|
||||
|
||||
/**
|
||||
* getPooledTasks() returns a List of WorkflowItems an EPerson could claim
|
||||
* (as a reviewer, etc.) for display on a user's MyDSpace page.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param e The EPerson we want to fetch the pooled tasks for.
|
||||
* @return list of basic workflow items
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public List<BasicWorkflowItem> getPooledTasks(Context context, EPerson e) throws SQLException;
|
||||
|
||||
/**
|
||||
* claim() claims a workflow task for an EPerson
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param workflowItem WorkflowItem to do the claim on
|
||||
* @param e The EPerson doing the claim
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
||||
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
|
||||
* to perform a particular action.
|
||||
*/
|
||||
public void claim(Context context, BasicWorkflowItem workflowItem, EPerson e)
|
||||
throws SQLException, IOException, AuthorizeException;
|
||||
|
||||
|
||||
/**
|
||||
* advance() sends an item forward in the workflow (reviewers,
|
||||
* approvers, and editors all do an 'approve' to move the item forward) if
|
||||
* the item arrives at the submit state, then remove the WorkflowItem and
|
||||
* call the archive() method to put it in the archive, and email notify the
|
||||
* submitter of a successful submission
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param workflowItem WorkflowItem to do the approval on
|
||||
* @param e EPerson doing the approval
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
||||
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
|
||||
* to perform a particular action.
|
||||
*/
|
||||
public void advance(Context context, BasicWorkflowItem workflowItem, EPerson e)
|
||||
throws SQLException, IOException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* advance() sends an item forward in the workflow (reviewers,
* approvers, and editors all do an 'approve' to move the item forward).
* If the item arrives at the submit state, the WorkflowItem is removed,
* the archive() method is called to put it in the archive, and the
* submitter is notified of the successful submission by email.
|
||||
*
|
||||
* @param context The relevant DSpace Context.
|
||||
* @param workflowItem WorkflowItem to do the approval on
|
||||
* @param e EPerson doing the approval
|
||||
* @param curate boolean indicating whether curation tasks should be done
|
||||
* @param record boolean indicating whether to record action
|
||||
* @return true if the item was successfully archived
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
||||
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
|
||||
* to perform a particular action.
|
||||
*/
|
||||
public boolean advance(Context context, BasicWorkflowItem workflowItem, EPerson e,
|
||||
boolean curate, boolean record)
|
||||
throws SQLException, IOException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* unclaim() returns an owned task/item to the pool
|
||||
*
|
||||
* @param context Context
|
||||
* @param workflowItem WorkflowItem to operate on
|
||||
* @param e EPerson doing the operation
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
|
||||
* @throws AuthorizeException Exception indicating the current user of the context does not have permission
|
||||
* to perform a particular action.
|
||||
*/
|
||||
public void unclaim(Context context, BasicWorkflowItem workflowItem, EPerson e)
|
||||
throws SQLException, IOException, AuthorizeException;
|
||||
|
||||
/**
|
||||
* Get the text representing the given workflow state
|
||||
*
|
||||
* @param state the workflow state
|
||||
* @return the text representation
|
||||
*/
|
||||
public String getWorkflowText(int state);
|
||||
|
||||
|
||||
/**
* Send email notices of curation activity on a workflow item to the given EPersons.
*/
|
||||
public void notifyOfCuration(Context c, BasicWorkflowItem wi, List<EPerson> ePeople,
|
||||
String taskName, String action, String message) throws SQLException, IOException;
|
||||
|
||||
/**
|
||||
* get the title of the item in this workflow
|
||||
*
|
||||
* @param wi the workflow item object
|
||||
* @return item title
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getItemTitle(BasicWorkflowItem wi) throws SQLException;
|
||||
|
||||
/**
|
||||
* get the name of the eperson who started this workflow
|
||||
*
|
||||
* @param wi the workflow item
|
||||
* @return submitter's name
|
||||
* @throws SQLException An exception that provides information on a database access error or other errors.
|
||||
*/
|
||||
public String getSubmitterName(BasicWorkflowItem wi) throws SQLException;
|
||||
}
|
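A minimal usage sketch of the task methods above, assuming a Spring-wired instance of this service plus an open Context and acting EPerson (variable names are illustrative, exception handling omitted):

// Hypothetical caller: advance the first claimed task, or claim one from the pool.
List<BasicWorkflowItem> owned = workflowService.getOwnedTasks(context, reviewer);
if (!owned.isEmpty()) {
    workflowService.advance(context, owned.get(0), reviewer);
} else {
    List<BasicWorkflowItem> pooled = workflowService.getPooledTasks(context, reviewer);
    if (!pooled.isEmpty()) {
        workflowService.claim(context, pooled.get(0), reviewer);
    }
}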
@@ -1,39 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.workflowbasic.service;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflowbasic.BasicWorkflowItem;
|
||||
import org.dspace.workflowbasic.TaskListItem;
|
||||
|
||||
/**
|
||||
* Service interface class for the TaskListItem object.
|
||||
* The implementation of this class is responsible for all business logic calls for the TaskListItem object and is
|
||||
* autowired by spring
|
||||
*
|
||||
* @author kevinvandevelde at atmire.com
|
||||
*/
|
||||
public interface TaskListItemService {
|
||||
|
||||
public TaskListItem create(Context context, BasicWorkflowItem workflowItem, EPerson ePerson) throws SQLException;
|
||||
|
||||
public void deleteByWorkflowItem(Context context, BasicWorkflowItem workflowItem) throws SQLException;
|
||||
|
||||
public void deleteByWorkflowItemAndEPerson(Context context, BasicWorkflowItem workflowItem, EPerson ePerson)
|
||||
throws SQLException;
|
||||
|
||||
public void deleteByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
|
||||
public void update(Context context, TaskListItem taskListItem) throws SQLException;
|
||||
|
||||
public List<TaskListItem> findByEPerson(Context context, EPerson ePerson) throws SQLException;
|
||||
}
|
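A short sketch of how the pool-management methods above fit together (workflowItem, reviewer, and taskListItemService are illustrative names; only the signatures come from the interface, exception handling omitted):

// Hypothetical pool management: add an EPerson to the task pool for a workflow item,
// then remove that pool entry once the reviewer has claimed the task.
TaskListItem poolEntry = taskListItemService.create(context, workflowItem, reviewer);
// ... later, when the reviewer claims the task:
taskListItemService.deleteByWorkflowItemAndEPerson(context, workflowItem, reviewer);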
@@ -76,7 +76,7 @@ import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
/**
|
||||
* When an item is submitted and is somewhere in a workflow, it has a row in the
|
||||
* WorkflowItem table pointing to it.
|
||||
* cwf_workflowitem table pointing to it.
|
||||
*
|
||||
* Once the item has completed the workflow it will be archived
|
||||
*
|
||||
|
@@ -24,6 +24,7 @@ import org.dspace.content.Item;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.workflow.WorkflowItem;
|
||||
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
|
||||
|
||||
/**
|
||||
* Class representing an item going through the workflow process in DSpace
|
||||
@@ -62,7 +63,7 @@ public class XmlWorkflowItem implements WorkflowItem {
|
||||
|
||||
/**
|
||||
* Protected constructor, create object using:
|
||||
* {@link org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService#create(Context, Item, Collection)}
|
||||
* {@link XmlWorkflowItemService#create(Context, Item, Collection)}
|
||||
*/
|
||||
protected XmlWorkflowItem() {
|
||||
|
||||
@@ -131,11 +132,4 @@ public class XmlWorkflowItem implements WorkflowItem {
|
||||
public void setPublishedBefore(boolean b) {
|
||||
this.publishedBefore = b;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getState() {
|
||||
// FIXME not used by the xml workflow, should be removed when the basic workflow is removed and the interfaces
|
||||
// simplified
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
@@ -0,0 +1,17 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
-----------------------------------------------------------------------------------
|
||||
-- Drop the 'workflowitem' and 'tasklistitem' tables
|
||||
-----------------------------------------------------------------------------------
|
||||
|
||||
DROP TABLE workflowitem CASCADE CONSTRAINTS;
|
||||
DROP TABLE tasklistitem CASCADE CONSTRAINTS;
|
||||
|
||||
DROP SEQUENCE workflowitem_seq;
|
||||
DROP SEQUENCE tasklistitem_seq;
|
@@ -0,0 +1,17 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
-----------------------------------------------------------------------------------
|
||||
-- Drop the 'workflowitem' and 'tasklistitem' tables
|
||||
-----------------------------------------------------------------------------------
|
||||
|
||||
DROP TABLE workflowitem CASCADE CONSTRAINTS;
|
||||
DROP TABLE tasklistitem CASCADE CONSTRAINTS;
|
||||
|
||||
DROP SEQUENCE workflowitem_seq;
|
||||
DROP SEQUENCE tasklistitem_seq;
|
@@ -0,0 +1,17 @@
|
||||
--
|
||||
-- The contents of this file are subject to the license and copyright
|
||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
||||
-- tree and available online at
|
||||
--
|
||||
-- http://www.dspace.org/license/
|
||||
--
|
||||
|
||||
-----------------------------------------------------------------------------------
|
||||
-- Drop the 'workflowitem' and 'tasklistitem' tables
|
||||
-----------------------------------------------------------------------------------
|
||||
|
||||
DROP TABLE workflowitem CASCADE;
|
||||
DROP TABLE tasklistitem CASCADE;
|
||||
|
||||
DROP SEQUENCE workflowitem_seq;
|
||||
DROP SEQUENCE tasklistitem_seq;
|
@@ -19,13 +19,6 @@
|
||||
|
||||
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
|
||||
|
||||
<!--If multiple importServices have been configured here but only one is to be used during the lookup step (StartSubmissionLookupStep),
|
||||
this can be accomplished by specifying the property "publication-lookup.url" to the baseAddress of the required importService
|
||||
So for example
|
||||
publication-lookup.url=https://eutils.ncbi.nlm.nih.gov/entrez/eutils/
|
||||
Will result in using the PubmedImportService for the lookup step
|
||||
Omitting this property will default to searching over all configured ImportService implementations
|
||||
-->
|
||||
<bean id="importService" class="org.dspace.importer.external.service.ImportService" lazy-init="false" autowire="byType" destroy-method="destroy">
|
||||
<property name="importSources">
|
||||
<list>
|
||||
@@ -38,7 +31,7 @@
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="ArXivImportService"
|
||||
class="org.dspace.importer.external.arxiv.service.ArXivImportMetadataSourceServiceImpl" scope="singleton">
|
||||
<property name="metadataFieldMapping" ref="ArXivMetadataFieldMapping"/>
|
||||
|
@@ -13,7 +13,6 @@
|
||||
<ref bean="openAccess"/>
|
||||
<ref bean="lease"/>
|
||||
<ref bean="embargoed" />
|
||||
<ref bean="embargoedWithGroupSelect" />
|
||||
<ref bean="administrator"/>
|
||||
<!-- <ref bean="networkAdministration"/> -->
|
||||
</list>
|
||||
@@ -28,11 +27,6 @@
|
||||
</bean>
|
||||
<bean id="lease" class="org.dspace.submit.model.AccessConditionOption">
|
||||
<property name="groupName" value="Anonymous"/>
|
||||
<!--
|
||||
use the selectGroupName to specify the group containing the subgroups
|
||||
that can be used for the policy
|
||||
<property name="selectGroupName" value="Lease Groups"/>
|
||||
-->
|
||||
<property name="name" value="lease"/>
|
||||
<property name="hasStartDate" value="false"/>
|
||||
<property name="hasEndDate" value="true"/>
|
||||
@@ -40,24 +34,11 @@
|
||||
</bean>
|
||||
<bean id="embargoed" class="org.dspace.submit.model.AccessConditionOption">
|
||||
<property name="groupName" value="Anonymous"/>
|
||||
<!--
|
||||
use the selectGroupName to specify the group containing the subgroups
|
||||
that can be used for the policy
|
||||
<property name="selectGroupName" value="Embargoed Groups"/>
|
||||
-->
|
||||
<property name="name" value="embargo"/>
|
||||
<property name="hasStartDate" value="true"/>
|
||||
<property name="startDateLimit" value="+36MONTHS"/>
|
||||
<property name="hasEndDate" value="false"/>
|
||||
|
||||
</bean>
|
||||
<bean id="embargoedWithGroupSelect" class="org.dspace.submit.model.AccessConditionOption">
|
||||
<property name="selectGroupName" value="Embargoed Groups"/>
|
||||
<property name="name" value="embargo"/>
|
||||
<property name="hasStartDate" value="true"/>
|
||||
<property name="startDateLimit" value="+36MONTHS"/>
|
||||
<property name="hasEndDate" value="false"/>
|
||||
</bean>
|
||||
</bean>
|
||||
<bean id="administrator" class="org.dspace.submit.model.AccessConditionOption">
|
||||
<property name="groupName" value="Administrator"/>
|
||||
<property name="name" value="administrator"/>
|
||||
|
@@ -8,7 +8,38 @@
|
||||
|
||||
<bean class="org.dspace.external.provider.impl.MockDataProvider" init-method="init">
|
||||
<property name="sourceIdentifier" value="mock"/>
|
||||
</bean>
|
||||
|
||||
<!-- SHERPA data providers set up to use mock SHERPA service -->
|
||||
<bean class="org.dspace.external.provider.impl.SHERPAv2JournalISSNDataProvider" init-method="init">
|
||||
<property name="sourceIdentifier" value="sherpaJournalIssn"/>
|
||||
<property name="sherpaService">
|
||||
<bean class="org.dspace.app.sherpa.MockSHERPAService">
|
||||
<property name="maxNumberOfTries" value="3"/>
|
||||
<property name="sleepBetweenTimeouts" value="2000"/>
|
||||
<property name="timeout" value="5000"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
<bean class="org.dspace.external.provider.impl.SHERPAv2JournalDataProvider" init-method="init">
|
||||
<property name="sourceIdentifier" value="sherpaJournal"/>
|
||||
<property name="sherpaService">
|
||||
<bean class="org.dspace.app.sherpa.MockSHERPAService">
|
||||
<property name="maxNumberOfTries" value="3"/>
|
||||
<property name="sleepBetweenTimeouts" value="2000"/>
|
||||
<property name="timeout" value="5000"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
<bean class="org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider" init-method="init">
|
||||
<property name="sourceIdentifier" value="sherpaPublisher"/>
|
||||
<property name="sherpaService">
|
||||
<bean class="org.dspace.app.sherpa.MockSHERPAService">
|
||||
<property name="maxNumberOfTries" value="3"/>
|
||||
<property name="sleepBetweenTimeouts" value="2000"/>
|
||||
<property name="timeout" value="5000"/>
|
||||
</bean>
|
||||
</property>
|
||||
</bean>
|
||||
<bean class="org.dspace.external.provider.impl.OrcidV3AuthorDataProvider" init-method="init">
|
||||
<property name="sourceIdentifier" value="orcid"/>
|
||||
|
@@ -0,0 +1,36 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans
|
||||
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
|
||||
http://www.springframework.org/schema/context
|
||||
http://www.springframework.org/schema/context/spring-context-2.5.xsd"
|
||||
default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">
|
||||
|
||||
<context:annotation-config /> <!-- allows us to use spring annotations in beans -->
|
||||
|
||||
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"
|
||||
id="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService">
|
||||
<property name="issnItemExtractors">
|
||||
<list>
|
||||
<bean class="org.dspace.app.sherpa.submit.MetadataValueISSNExtractor">
|
||||
<property name="metadataList">
|
||||
<list>
|
||||
<value>dc.identifier.issn</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
<!-- Uncomment this bean if you have SHERPARoMEOJournalTitle enabled
|
||||
<bean class="org.dspace.app.sherpa.submit.MetadataAuthorityISSNExtractor">
|
||||
<property name="metadataList">
|
||||
<list>
|
||||
<value>dc.title.alternative</value>
|
||||
</list>
|
||||
</property>
|
||||
</bean> -->
|
||||
</list>
|
||||
</property>
|
||||
</bean>
|
||||
|
||||
</beans>
|
@@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
|
||||
The contents of this file are subject to the license and copyright
|
||||
detailed in the LICENSE and NOTICE files at the root of the source
|
||||
tree and available online at
|
||||
|
||||
http://www.dspace.org/license/
|
||||
|
||||
-->
|
||||
<beans xmlns="http://www.springframework.org/schema/beans"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:context="http://www.springframework.org/schema/context"
|
||||
xsi:schemaLocation="http://www.springframework.org/schema/beans
|
||||
http://www.springframework.org/schema/beans/spring-beans.xsd
|
||||
http://www.springframework.org/schema/context
|
||||
http://www.springframework.org/schema/context/spring-context.xsd"
|
||||
default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">
|
||||
|
||||
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
|
||||
|
||||
<bean class="org.dspace.app.sherpa.MockSHERPAService" id="org.dspace.app.sherpa.MockSHERPAService">
|
||||
<property name="maxNumberOfTries" value="3"/>
|
||||
<property name="sleepBetweenTimeouts" value="2000"/>
|
||||
<property name="timeout" value="5000"/>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitService"
|
||||
id="org.dspace.app.sherpa.submit.SHERPASubmitService">
|
||||
<property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService"/>
|
||||
<property name="configuration" ref="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"/>
|
||||
</bean>
|
||||
|
||||
</beans>
|
@@ -0,0 +1,135 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
|
||||
/**
|
||||
* Mock implementation for SHERPA API service (used by SHERPA submit service to check
|
||||
* journal policies)
|
||||
* This class will return mock SHERPA responses so they can be parsed and turned into external data objects downstream
|
||||
*
|
||||
* @author Kim Shepherd
|
||||
*/
|
||||
public class MockSHERPAService extends SHERPAService {
|
||||
|
||||
/**
|
||||
* Simple overridden 'searchByJournalISSN' so that we still attempt to build the URI but, rather than making
* an actual HTTP call, return a parsed SHERPAResponse for The Lancet based on known-good JSON stored with our
* test resources.
|
||||
* If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be
|
||||
* returned.
|
||||
* @param query ISSN string to pass in an "issn equals" API query
|
||||
* @return SHERPAResponse
|
||||
*/
|
||||
@Override
|
||||
public SHERPAResponse searchByJournalISSN(String query) {
|
||||
return performRequest("publication", "issn", "equals", query, 0, 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple overridden performRequest so that we still attempt to build the URI but, rather than making
* an actual HTTP call, return a parsed SHERPAResponse for The Lancet based on known-good JSON stored with our
* test resources.
|
||||
* If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be
|
||||
* returned.
|
||||
* @param value a journal / publication name, or ID, etc.
|
||||
* @return SHERPAResponse
|
||||
*/
|
||||
@Override
|
||||
public SHERPAResponse performRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
try {
|
||||
String endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
String apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
|
||||
// Rather than search, we will simply attempt to build the URI using the real prepare method
|
||||
// so that any errors there are caught, and will return a valid response for The Lancet
|
||||
InputStream content = null;
|
||||
try {
|
||||
// Prepare the URI - this will not be used but should be evaluated
|
||||
// in case a syntax exception is thrown
|
||||
URI uri = prepareQuery(value, endpoint, apiKey);
|
||||
if (uri == null) {
|
||||
return new SHERPAResponse("Error building URI");
|
||||
}
|
||||
|
||||
// Get mock JSON - in this case, a known good result for The Lancet
|
||||
content = getClass().getResourceAsStream("thelancet.json");
|
||||
|
||||
// Parse JSON input stream and return response for later evaluation
|
||||
return new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
|
||||
|
||||
} catch (URISyntaxException e) {
|
||||
// This object will be marked as having an error for later evaluation
|
||||
return new SHERPAResponse(e.getMessage());
|
||||
} finally {
|
||||
// Close input stream
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// This object will be marked as having an error for later evaluation
|
||||
return new SHERPAResponse(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple overridden performPublisherRequest so that we still attempt to build the URI but, rather than making
* an actual HTTP call, return a parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our
* test resources.
|
||||
* If URI creation, parsing, or IO fails along the way, a SHERPAPublisherResponse with an error message set will be
|
||||
* returned.
|
||||
* @param value a journal / publication name, or ID, etc.
|
||||
* @return SHERPAPublisherResponse
|
||||
*/
|
||||
@Override
|
||||
public SHERPAPublisherResponse performPublisherRequest(String type, String field, String predicate, String value,
|
||||
int start, int limit) {
|
||||
try {
|
||||
String endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
String apiKey = configurationService.getProperty("sherpa.romeo.apikey");
|
||||
|
||||
// Rather than search, we will simply attempt to build the URI using the real prepare method
// so that any errors there are caught, and will return a valid response for PLOS
|
||||
InputStream content = null;
|
||||
try {
|
||||
// Prepare the URI - this will not be used but should be evaluated
|
||||
// in case a syntax exception is thrown
|
||||
URI uri = prepareQuery(value, endpoint, apiKey);
|
||||
|
||||
// Get mock JSON - in this case, a known good result for PLOS
|
||||
content = getClass().getResourceAsStream("plos.json");
|
||||
|
||||
// Parse JSON input stream and return response for later evaluation
|
||||
return new SHERPAPublisherResponse(content, SHERPAPublisherResponse.SHERPAFormat.JSON);
|
||||
|
||||
} catch (URISyntaxException e) {
|
||||
// This object will be marked as having an error for later evaluation
|
||||
return new SHERPAPublisherResponse(e.getMessage());
|
||||
} finally {
|
||||
// Close input stream
|
||||
if (content != null) {
|
||||
content.close();
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// This object will be marked as having an error for later evaluation
|
||||
return new SHERPAPublisherResponse(e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
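A quick sketch of exercising this mock outside a test, mirroring the service-manager lookup used by the tests below (the bean name comes from the test Spring config above; the rest is illustrative):

// Fetch the mock from the DSpace service manager and run an ISSN query;
// no HTTP call is made, the stored thelancet.json is parsed instead.
SHERPAService sherpaService = DSpaceServicesFactory.getInstance().getServiceManager()
        .getServiceByName("org.dspace.app.sherpa.MockSHERPAService", MockSHERPAService.class);
SHERPAResponse response = sherpaService.searchByJournalISSN("0140-6736");
if (!response.isError()) {
    System.out.println(response.getJournals().get(0).getTitles().get(0)); // "The Lancet"
}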
@@ -0,0 +1,355 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import org.apache.velocity.exception.ResourceNotFoundException;
|
||||
import org.dspace.AbstractDSpaceTest;
|
||||
import org.dspace.content.dto.MetadataValueDTO;
|
||||
import org.dspace.external.factory.ExternalServiceFactory;
|
||||
import org.dspace.external.model.ExternalDataObject;
|
||||
import org.dspace.external.provider.ExternalDataProvider;
|
||||
import org.dspace.external.service.ExternalDataService;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Integration tests for the SHERPA external data providers, configured in the test Spring config to use the mock SHERPA service.
|
||||
*/
|
||||
public class SHERPADataProviderTest extends AbstractDSpaceTest {
|
||||
|
||||
ExternalDataService externalDataService;
|
||||
ExternalDataProvider sherpaJournalProvider;
|
||||
ExternalDataProvider sherpaPublisherProvider;
|
||||
ExternalDataProvider sherpaJournalIssnProvider;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
// Set up External Service Factory and set data providers
|
||||
externalDataService = ExternalServiceFactory.getInstance().getExternalDataService();
|
||||
sherpaJournalProvider = externalDataService.getExternalDataProvider("sherpaJournal");
|
||||
sherpaPublisherProvider = externalDataService.getExternalDataProvider("sherpaPublisher");
|
||||
sherpaJournalIssnProvider =
|
||||
externalDataService.getExternalDataProvider("sherpaJournalIssn");
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching the SHERPAv2JournalISSNProvider for an ISSN and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testGetJournalISSNExternalObject() {
|
||||
// Get a response with a single valid journal, using the mock service which will return a response based on
|
||||
// thelancet.json stored response in test resources
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = The Lancet
|
||||
// dc.identifier.issn 0140-6736
|
||||
// getId() 0140-6736
|
||||
|
||||
String validIssn = "0140-6736";
|
||||
String validName = "The Lancet";
|
||||
Optional<ExternalDataObject> externalDataObject = sherpaJournalIssnProvider.getExternalDataObject(validIssn);
|
||||
// If data object isn't here, throw a resource not found exception
|
||||
ExternalDataObject dataObject = externalDataObject.orElseThrow(
|
||||
() -> new ResourceNotFoundException("Couldn't find a data object for ISSN " + validIssn));
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("issn")) {
|
||||
identifier = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.issn match the expected value?
|
||||
assertEquals("Identifier ISSN must equal " + validIssn, validIssn, identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching the SHERPAv2JournalISSNProvider for an ISSN and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testSearchJournalISSNExternalObjects() {
|
||||
// Get a response with a single valid journal, using the mock service which will return a response based on
|
||||
// thelancet.json stored response in test resources
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = The Lancet
|
||||
// dc.identifier.issn 0140-6736
|
||||
// getId() 0140-6736
|
||||
|
||||
String validIssn = "0140-6736";
|
||||
String validName = "The Lancet";
|
||||
List<ExternalDataObject> externalDataObjects =
|
||||
sherpaJournalIssnProvider.searchExternalDataObjects(validIssn, 0, 1);
|
||||
|
||||
// Assert that the response is valid and not empty
|
||||
assertTrue("Couldn't find a data object for publication name " + validName,
|
||||
externalDataObjects != null && !externalDataObjects.isEmpty());
|
||||
|
||||
// Get the first search result for inspection
|
||||
ExternalDataObject dataObject = externalDataObjects.get(0);
|
||||
|
||||
// Assert that the data object itself is not null
|
||||
assertNotNull("External data object must not be null", dataObject);
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("issn")) {
|
||||
identifier = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.issn match the expected value?
|
||||
assertEquals("Identifier ISSN must equal " + validIssn, validIssn, identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching the SHERPAv2JournalProvider for a journal and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testGetJournalExternalObject() {
|
||||
// Get a response with a single valid journal, using the mock service which will return a response based on
|
||||
// thelancet.json stored response in test resources
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = The Lancet
|
||||
// dc.identifier.issn 0140-6736
|
||||
// getId() 0140-6736
|
||||
|
||||
String validIssn = "0140-6736";
|
||||
String validName = "The Lancet";
|
||||
Optional<ExternalDataObject> externalDataObject = sherpaJournalProvider.getExternalDataObject(validName);
|
||||
// If data object isn't here, throw a resource not found exception
|
||||
ExternalDataObject dataObject = externalDataObject.orElseThrow(
|
||||
() -> new ResourceNotFoundException("Couldn't find a data object for publication name " + validName));
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("issn")) {
|
||||
identifier = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.issn match the expected value?
|
||||
assertEquals("Identifier ISSN must equal " + validIssn, validIssn, identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching the SHERPAv2JournalProvider for a journal and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testSearchJournalObjects() {
|
||||
// Get a response with a single valid journal, using the mock service which will return a response based on
|
||||
// thelancet.json stored response in test resources. We are searching here, using the search method but
|
||||
// will just return 1 result since that is what our test resource matches, and is sufficient for testing
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = The Lancet
|
||||
// dc.identifier.issn 0140-6736
|
||||
// getId() 0140-6736
|
||||
|
||||
String validIssn = "0140-6736";
|
||||
String validName = "The Lancet";
|
||||
List<ExternalDataObject> externalDataObjects =
|
||||
sherpaJournalProvider.searchExternalDataObjects(validName, 0, 1);
|
||||
|
||||
// Assert that the response is valid and not empty
|
||||
assertTrue("Couldn't find a data object for publication name " + validName,
|
||||
externalDataObjects != null && !externalDataObjects.isEmpty());
|
||||
|
||||
// Get the first search result for inspection
|
||||
ExternalDataObject dataObject = externalDataObjects.get(0);
|
||||
|
||||
// Assert that the data object itself is not null
|
||||
assertNotNull("External data object must not be null", dataObject);
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("issn")) {
|
||||
identifier = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.issn match the expected value?
|
||||
assertEquals("Identifier ISSN must equal " + validIssn, validIssn, identifier);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test getting a publisher from the SHERPAv2PublisherDataProvider and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testGetPublisherExternalObject() {
|
||||
// Get a response for a single valid publisher name, using the mock service which will return a response
// based on the plos.json stored response in test resources
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = Public Library of Science
|
||||
// dc.identifier.sherpaPublisher 112
|
||||
// dc.identifier.other http://www.plos.org/
|
||||
|
||||
// Set expected values
|
||||
String validName = "Public Library of Science";
|
||||
String validIdentifier = "112";
|
||||
String validUrl = "http://www.plos.org/";
|
||||
|
||||
// Retrieve the dataobject(s) from the data provider
|
||||
Optional<ExternalDataObject> externalDataObject = sherpaPublisherProvider.getExternalDataObject(validName);
|
||||
// If data object isn't here, throw a resource not found exception
|
||||
ExternalDataObject dataObject = externalDataObject.orElseThrow(
|
||||
() -> new ResourceNotFoundException("Couldn't find a data object for publication name " + validName));
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
String url = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("sherpaPublisher")) {
|
||||
identifier = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("other")) {
|
||||
url = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.sherpaPublisher match the expected value?
|
||||
assertEquals("Publisher ID must equal " + validIdentifier, validIdentifier, identifier);
|
||||
|
||||
// Does dc.identifier.other match the expected value?
|
||||
assertEquals("Publisher URL must equal " + validUrl, validUrl, url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching the SHERPAv2PublisherDataProvider for a publisher and inspect the returned data object
|
||||
* The provider is configured to use the Mock SHERPAService.
|
||||
*/
|
||||
@Test
|
||||
public void testSearchPublisherExternalObjects() {
|
||||
// Get a response for a single valid publisher name, using the mock service which will return a response
// based on the plos.json stored response in test resources
|
||||
// We expect to see the following values set correctly:
|
||||
// dc.title = Public Library of Science
|
||||
// dc.identifier.sherpaPublisher 112
|
||||
// dc.identifier.other http://www.plos.org/
|
||||
|
||||
// Set expected values
|
||||
String validName = "Public Library of Science";
|
||||
String validIdentifier = "112";
|
||||
String validUrl = "http://www.plos.org/";
|
||||
|
||||
// Retrieve the dataobject(s) from the data provider
|
||||
List<ExternalDataObject> externalDataObjects =
|
||||
sherpaPublisherProvider.searchExternalDataObjects(validName, 0, 1);
|
||||
|
||||
// Assert that the response is valid and not empty
|
||||
assertTrue("Couldn't find a data object for publication name " + validName,
|
||||
externalDataObjects != null && !externalDataObjects.isEmpty());
|
||||
|
||||
ExternalDataObject dataObject = externalDataObjects.get(0);
|
||||
|
||||
// Assert that the data object itself is not null
|
||||
assertNotNull("External data object must not be null", dataObject);
|
||||
|
||||
// Instantiate some Strings that we'll set if we find the expected metadata
|
||||
String title = null;
|
||||
String identifier = null;
|
||||
String url = null;
|
||||
for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
|
||||
if (metadataValue.getSchema().equalsIgnoreCase("dc") &&
|
||||
metadataValue.getElement().equalsIgnoreCase("title")) {
|
||||
title = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("sherpaPublisher")) {
|
||||
identifier = metadataValue.getValue();
|
||||
} else if (metadataValue.getSchema().equalsIgnoreCase("dc")
|
||||
&& metadataValue.getElement().equalsIgnoreCase("identifier")
|
||||
&& metadataValue.getQualifier().equalsIgnoreCase("other")) {
|
||||
url = metadataValue.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
// Does dc.title match the expected value?
|
||||
assertEquals("Title metadata must equal '" + validName + "' ", validName, title);
|
||||
|
||||
// Does dc.identifier.sherpaPublisher match the expected value?
|
||||
assertEquals("Publisher ID must equal " + validIdentifier, validIdentifier, identifier);
|
||||
|
||||
// Does dc.identifier.other match the expected value?
|
||||
assertEquals("Publisher URL must equal " + validUrl, validUrl, url);
|
||||
}
|
||||
}
|
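The four tests above repeat the same metadata scan; a small private helper along these lines (hypothetical, not part of the diff) would factor it out:

// Hypothetical helper: return the first dc value matching the given element (and qualifier, if any)
private static String firstValue(ExternalDataObject dataObject, String element, String qualifier) {
    for (MetadataValueDTO metadataValue : dataObject.getMetadata()) {
        if ("dc".equalsIgnoreCase(metadataValue.getSchema())
                && element.equalsIgnoreCase(metadataValue.getElement())
                && (qualifier == null || qualifier.equalsIgnoreCase(metadataValue.getQualifier()))) {
            return metadataValue.getValue();
        }
    }
    return null;
}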
@@ -0,0 +1,237 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URLEncoder;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.commons.collections4.CollectionUtils;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.dspace.AbstractDSpaceTest;
|
||||
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.app.sherpa.v2.SHERPAUtils;
|
||||
import org.dspace.services.ConfigurationService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* Integration tests for SHERPA service
|
||||
* @author Kim Shepherd
|
||||
* @see MockSHERPAService
|
||||
*/
|
||||
public class SHERPAServiceTest extends AbstractDSpaceTest {
|
||||
|
||||
protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
|
||||
|
||||
// Spring SHERPA service. For testing purposes, this will use a mock service
|
||||
// that doesn't perform actual HTTP queries but does construct URIs
|
||||
// and returns a valid response (The Lancet) if no other errors are encountered
|
||||
protected SHERPAService sherpaService = DSpaceServicesFactory.getInstance().getServiceManager()
|
||||
.getServiceByName("org.dspace.app.sherpa.MockSHERPAService", MockSHERPAService.class);
|
||||
|
||||
public SHERPAServiceTest() {
|
||||
|
||||
}
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Test searching by Journal ISSN directly against SHERPA service
|
||||
*/
|
||||
@Test
|
||||
public void testSearchByJournalISSN() {
|
||||
// Get a response with a single valid ISSN, using the mock service which will return a response based on
|
||||
// thelancet.json stored response in test resources
|
||||
String validISSN = "0140-6736";
|
||||
SHERPAResponse sherpaResponse = sherpaService.searchByJournalISSN(validISSN);
|
||||
|
||||
// This response should NOT contain an error (isError() should be false)
|
||||
assertFalse("Response contained an error flag / message: " + sherpaResponse.getMessage(),
|
||||
sherpaResponse.isError());
|
||||
|
||||
// This response should contain a single journal called The Lancet
|
||||
String expectedTitle = "The Lancet";
|
||||
assertTrue("Response did not contain a journal with the expected title '" + expectedTitle + '"',
|
||||
expectedTitle.equals(sherpaResponse.getJournals().get(0).getTitles().get(0)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Test that the URIBuilder and sanitisation procedures are producing expected URLs, comparing the results
|
||||
* to manually compiled strings
|
||||
* @throws URISyntaxException
|
||||
*/
|
||||
@Test
|
||||
public void testUriConstruction() throws URISyntaxException, UnsupportedEncodingException {
|
||||
// Get values for base URL and api key parameter
|
||||
String endpoint = configurationService.getProperty("sherpa.romeo.url",
|
||||
"https://v2.sherpa.ac.uk/cgi/retrieve");
|
||||
String apiKey = configurationService.getProperty("sherpa.romeo.apikey", null);
|
||||
|
||||
// Compare expected outputs
|
||||
// Valid ISSN (The Lancet)
|
||||
String validISSN = "0140-6736";
|
||||
// Invalid ISSN that also contains characters we strip out in sanitisation
|
||||
String invalidISSN = "{TEST}";
|
||||
|
||||
// Characters like { and } that conflict with JSON should be stripped from the filter query
|
||||
assertEquals("JSON filter query sanitisation not stripping special characters",
|
||||
"TEST", SHERPAUtils.sanitiseQuery(invalidISSN));
|
||||
|
||||
// The valid string should look like this (assuming default configuration)
|
||||
// https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication&filter=[["issn","equals","0140-6736"]]&format=Json
|
||||
String validUrl = new URIBuilder(buildUrlString(validISSN, endpoint, apiKey)).toString();
|
||||
assertEquals("Built and expected valid URLs differ", validUrl,
|
||||
sherpaService.constructHttpGet("publication", "issn", "equals", validISSN)
|
||||
.getURI().toASCIIString());
|
||||
|
||||
// The invalid string should look like this (assuming default configuration)
|
||||
// https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication&filter=[["issn","equals","TEST"]]&format=Json
|
||||
// Note - it should return 0 results from the API, but these services are not intended to validate the ISSN
|
||||
// query, though they do sanitise it for the JSON input type, hence expecting the braces to be stripped
|
||||
String invalidUrl = new URIBuilder(buildUrlString(invalidISSN, endpoint, apiKey)).toString();
|
||||
assertEquals("Built and expected invalid URLs differ", invalidUrl,
|
||||
sherpaService.constructHttpGet("publication", "issn", "equals", invalidISSN)
|
||||
.getURI().toASCIIString());
|
||||
|
||||
|
||||
// The null query string should look like this (assuming default configuration)
|
||||
// https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication&filter=[["issn","equals",""]]&format=Json
|
||||
// Note - it should return 0 results from the API, but all we do is log a warning, this is not considered
|
||||
// a fatal URI syntax exception (the remote call does work, and returns 0 items as valid JSON)
|
||||
String nullUrl = new URIBuilder(buildUrlString(null, endpoint, apiKey)).toString();
|
||||
assertEquals("Built and expected invalid URLs differ", nullUrl,
|
||||
sherpaService.constructHttpGet("publication", "issn", "equals", null)
|
||||
.getURI().toASCIIString());
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Thorough test of returned SHERPAResponse object to ensure all expected fields are there and valid
|
||||
*/
|
||||
@Test
|
||||
public void testJournalResponse() {
|
||||
// Valid ISSN (The Lancet)
|
||||
String validISSN = "0140-6736";
|
||||
SHERPAResponse response = sherpaService.searchByJournalISSN(validISSN);
|
||||
|
||||
// Assert response is not error, or fail with message
|
||||
assertFalse("Response was flagged as 'isError'", response.isError());
|
||||
|
||||
// Assert response has at least one journal result, or fail with message
|
||||
assertTrue("List of journals did not contain at least one parsed journal",
|
||||
CollectionUtils.isNotEmpty(response.getJournals()));
|
||||
|
||||
// Assert response has a journal with title "The Lancet", or fail with message
|
||||
String expectedTitle = "The Lancet";
|
||||
assertTrue("Journal title did not match expected '" + expectedTitle + "' value",
|
||||
CollectionUtils.isNotEmpty(response.getJournals().get(0).getTitles())
|
||||
&& expectedTitle.equals(response.getJournals().get(0).getTitles().get(0)));
|
||||
|
||||
// Assert response has expected publication (metadata) URI
|
||||
String expectedSystemMetadataUri = "http://v2.sherpa.ac.uk/id/publication/23803";
|
||||
assertTrue("Response metadata URI did not match expected '" + expectedSystemMetadataUri
|
||||
+ "' value", expectedSystemMetadataUri.equals(response.getMetadata().getUri()));
|
||||
|
||||
// Assert response has at least one policy
|
||||
assertTrue("Response did not contain at least one archiving policy",
|
||||
CollectionUtils.isNotEmpty(response.getJournals().get(0).getPolicies()));
|
||||
|
||||
// Assert response has at least one permitted version
|
||||
assertTrue("Response did not contain at least one permitted version",
|
||||
CollectionUtils.isNotEmpty(response.getJournals().get(0).getPolicies().get(0).getPermittedVersions()));
|
||||
|
||||
// Assert journal has at least one publisher
|
||||
assertTrue("Response did not contain at least one publisher",
|
||||
CollectionUtils.isNotEmpty(response.getJournals().get(0).getPublishers()));
|
||||
|
||||
// Assert first publisher has name 'Elsevier'
|
||||
String expectedPublisherName = "Elsevier";
|
||||
assertTrue("Response did not contain expected publisher name '" + expectedPublisherName + "'",
|
||||
expectedPublisherName.equals(response.getJournals().get(0).getPublisher().getName()));
|
||||
}
|
||||
|
||||
/**
|
||||
* Thorough test of returned SHERPAPublisherResponse object to ensure all expected fields are there and valid
|
||||
*/
|
||||
@Test
|
||||
public void testPublisherResponse() {
|
||||
// Set up basic query and query the (mock) service
|
||||
String publisherName = "Public Library of Science";
|
||||
SHERPAPublisherResponse response = sherpaService.performPublisherRequest(
|
||||
"publisher", "name", "equals", publisherName, 0, 1);
|
||||
|
||||
// Assert response is not error, or fail with message
|
||||
assertFalse("Response was flagged as 'isError'", response.isError());
|
||||
|
||||
// Assert response has at least one publisher result, or fail with message
|
||||
assertTrue("List of publishers did not contain at least one parsed publisher",
|
||||
CollectionUtils.isNotEmpty(response.getPublishers()));
|
||||
|
||||
// Assert response has a publisher with name "Public Library of Science", or fail with message
|
||||
String expectedName = "Public Library of Science";
|
||||
assertEquals("Publisher name did not match expected '" + expectedName + "' value",
|
||||
expectedName, response.getPublishers().get(0).getName());
|
||||
|
||||
// Assert response has expected publisher URL
|
||||
String expectedUrl = "http://www.plos.org/";
|
||||
assertEquals("Response metadata URI did not match expected '" + expectedUrl
|
||||
+ "' value", expectedUrl, response.getPublishers().get(0).getUri());
|
||||
|
||||
// Assert response has the expected publisher ID
|
||||
String expectedId = "112";
|
||||
assertEquals("Response publisher ID did not match expected ID " + expectedId,
|
||||
expectedId, response.getPublishers().get(0).getIdentifier());
|
||||
}
|
||||
|
||||
/**
|
||||
* Build URL manually with string builder to compare to URIBuilder usage in actual service
|
||||
* @param query
|
||||
* @param endpoint
|
||||
* @param apiKey
|
||||
* @return
|
||||
*/
|
||||
public static String buildUrlString(String query, String endpoint, String apiKey) {
|
||||
query = SHERPAUtils.sanitiseQuery(query);
|
||||
StringBuilder expected = new StringBuilder();
|
||||
String filter = "[[\"issn\",\"equals\",\"" + query + "\"]]";
|
||||
expected.append(endpoint).append("?")
|
||||
.append("item-type=publication&filter=").append(URLEncoder.encode(filter, StandardCharsets.UTF_8))
|
||||
.append("&format=Json&offset=0&limit=1");
|
||||
if (StringUtils.isNotBlank(apiKey)) {
|
||||
expected.append("&api-key=").append(apiKey);
|
||||
}
|
||||
return expected.toString();
|
||||
}
|
||||
|
||||
}
|
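For reference, a sketch of what the helper above produces with the default endpoint and no API key (hypothetical usage; the encoded filter simply follows from URLEncoder):

// Hypothetical usage of buildUrlString; the decoded filter is [["issn","equals","0140-6736"]]
String url = buildUrlString("0140-6736", "https://v2.sherpa.ac.uk/cgi/retrieve", null);
// Expected:
// https://v2.sherpa.ac.uk/cgi/retrieve?item-type=publication&filter=%5B%5B%22issn%22%2C%22equals%22%2C%220140-6736%22%5D%5D&format=Json&offset=0&limit=1
System.out.println(url);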
@@ -0,0 +1,128 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.app.sherpa.submit;
|
||||
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.dspace.app.sherpa.v2.SHERPAResponse;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.MetadataField;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.MetadataValue;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.content.service.CollectionService;
|
||||
import org.dspace.content.service.CommunityService;
|
||||
import org.dspace.content.service.InstallItemService;
|
||||
import org.dspace.content.service.ItemService;
|
||||
import org.dspace.content.service.MetadataFieldService;
|
||||
import org.dspace.content.service.MetadataValueService;
|
||||
import org.dspace.content.service.WorkspaceItemService;
|
||||
import org.dspace.services.factory.DSpaceServicesFactory;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* SHERPASubmitServiceTest creates a dummy item with an ISSN in its metadata, and makes sure
|
||||
* that the ISSN is detected and passed to SHERPAService for a mock query
|
||||
*/
|
||||
public class SHERPASubmitServiceTest extends AbstractUnitTest {
|
||||
|
||||
// Set up services
|
||||
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
|
||||
protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
|
||||
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
|
||||
protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
|
||||
protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
|
||||
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
|
||||
protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
|
||||
SHERPASubmitService sherpaSubmitService = DSpaceServicesFactory.getInstance().getServiceManager()
|
||||
.getServiceByName("org.dspace.app.sherpa.submit.SHERPASubmitService", SHERPASubmitService.class);
|
||||
Collection testCollection = null;
|
||||
Community testCommunity = null;
|
||||
|
||||
|
||||
@BeforeClass
|
||||
public static void setUpClass() {
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownClass() {
|
||||
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() throws SQLException, AuthorizeException {
|
||||
context.turnOffAuthorisationSystem();
|
||||
// Create primary Test community
|
||||
testCommunity = communityService.create(null, context);
|
||||
communityService
|
||||
.addMetadata(context, testCommunity, MetadataSchemaEnum.DC.getName(),
|
||||
"title", null, null, "Test Community");
|
||||
communityService.update(context, testCommunity);
|
||||
|
||||
// Create our primary Test Collection
|
||||
testCollection = collectionService.create(context, testCommunity);
|
||||
collectionService.addMetadata(context, testCollection, "dc", "title", null, null,
|
||||
"Test Collection");
|
||||
collectionService.update(context, testCollection);
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
context.restoreAuthSystemState();
|
||||
testCommunity = null;
|
||||
testCollection = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Test the ISSN extraction
|
||||
*/
|
||||
@Test
|
||||
public void testGetISSNs() throws AuthorizeException, SQLException {
|
||||
String validISSN = "0140-6736";
|
||||
// Create and install an item with an ISSN
|
||||
WorkspaceItem testWorkspaceItem = workspaceItemService.create(context, testCollection, false);
|
||||
Item testItem = installItemService.installItem(context, testWorkspaceItem);
|
||||
|
||||
// Set up ISSN metadatavalue
|
||||
MetadataField issnField = metadataFieldService.
|
||||
findByString(context, "dc.identifier.issn", '.');
|
||||
MetadataValue metadataValue = metadataValueService.create(context, testItem, issnField);
|
||||
metadataValue.setValue(validISSN);
|
||||
|
||||
// Get responses from SHERPA submit service, which should inspect item ISSNs and perform search
|
||||
// on the mock SHERPA service
|
||||
List<SHERPAResponse> responses = sherpaSubmitService.searchRelatedJournals(context, testItem);
|
||||
|
||||
// Make sure response is not null or empty
|
||||
assertTrue("Response list should not be null or empty",
|
||||
responses != null && !responses.isEmpty());
|
||||
|
||||
// For each response (there should be only one based on test data) perform the standard set
|
||||
// of thorough parsing tests
|
||||
for (SHERPAResponse response : responses) {
|
||||
// Assert response is not error, or fail with message
|
||||
assertFalse("Response was flagged as 'isError'", response.isError());
|
||||
|
||||
// Skip remainder of parsing tests - these are already done in SHERPAServiceTest
|
||||
}
|
||||
}
|
||||
|
||||
}
|
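A minimal sketch of a non-test caller using the same entry point (only searchRelatedJournals(Context, Item) and the response accessors appear in the code above; the surrounding variables are assumptions, exception handling omitted):

// Hypothetical caller: look up archiving policies for an item's ISSNs during submission
List<SHERPAResponse> responses = sherpaSubmitService.searchRelatedJournals(context, item);
for (SHERPAResponse sherpaResponse : responses) {
    if (!sherpaResponse.isError() && sherpaResponse.getJournals() != null
            && !sherpaResponse.getJournals().isEmpty()) {
        // e.g. show the first matched journal title to the submitter
        System.out.println(sherpaResponse.getJournals().get(0).getTitles().get(0));
    }
}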
@@ -83,6 +83,21 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
|
||||
return (B) this;
|
||||
}
|
||||
|
||||
protected <B extends AbstractDSpaceObjectBuilder<T>> B addMetadataValue(final T dso, final String schema,
|
||||
final String element,
|
||||
final String qualifier,
|
||||
final String language,
|
||||
final String value,
|
||||
final String authority,
|
||||
final int confidence) {
|
||||
try {
|
||||
getService().addMetadata(context, dso, schema, element, qualifier, language, value, authority, confidence);
|
||||
} catch (Exception e) {
|
||||
return handleException(e);
|
||||
}
|
||||
return (B) this;
|
||||
}
|
||||
|
||||
protected <B extends AbstractDSpaceObjectBuilder<T>> B setMetadataSingleValue(final T dso, final String schema,
|
||||
final String element,
|
||||
final String qualifier,
|
||||
|
@@ -16,10 +16,8 @@ import org.dspace.content.Item;
|
||||
import org.dspace.content.LicenseUtils;
|
||||
import org.dspace.content.MetadataSchemaEnum;
|
||||
import org.dspace.content.WorkspaceItem;
|
||||
import org.dspace.core.Constants;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.event.Event;
|
||||
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
|
||||
import org.dspace.xmlworkflow.state.Step;
|
||||
import org.dspace.xmlworkflow.state.Workflow;
|
||||
@@ -88,15 +86,13 @@ public class ClaimedTaskBuilder extends AbstractBuilder<ClaimedTask, ClaimedTask
|
||||
// temporary switch to the wf user
|
||||
EPerson submitter = context.getCurrentUser();
|
||||
context.setCurrentUser(user);
|
||||
XmlWorkflowServiceFactory factory = (XmlWorkflowServiceFactory) XmlWorkflowServiceFactory.getInstance();
|
||||
XmlWorkflowServiceFactory factory = XmlWorkflowServiceFactory.getInstance();
|
||||
Workflow workflow = factory.getWorkflowFactory().getWorkflow(task.getWorkflowItem().getCollection());
|
||||
Step step = workflow.getStep(task.getStepID());
|
||||
WorkflowActionConfig currentActionConfig = step.getActionConfig(task.getActionID());
|
||||
workflowService
|
||||
.doState(context, user, null, task.getWorkflowItem().getID(), workflow,
|
||||
currentActionConfig);
|
||||
context.addEvent(new Event(Event.MODIFY, Constants.ITEM, task.getWorkflowItem().getItem().getID(), null,
|
||||
itemService.getIdentifiers(context, task.getWorkflowItem().getItem())));
|
||||
claimedTask = getService().findByWorkflowIdAndEPerson(context, workflowItem, user);
|
||||
// restore the submitter as current user
|
||||
context.setCurrentUser(submitter);
|
||||
|
Some files were not shown because too many files have changed in this diff