diff --git a/.github/workflows/label_merge_conflicts.yml b/.github/workflows/label_merge_conflicts.yml
new file mode 100644
index 0000000000..dcbab18f1b
--- /dev/null
+++ b/.github/workflows/label_merge_conflicts.yml
@@ -0,0 +1,25 @@
+# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
+name: Check for merge conflicts
+
+# Run whenever the "main" branch is updated
+# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
+on:
+ push:
+ branches:
+ - main
+
+jobs:
+ triage:
+ runs-on: ubuntu-latest
+ steps:
+ # See: https://github.com/mschilde/auto-label-merge-conflicts/
+ - name: Auto-label PRs with merge conflicts
+ uses: mschilde/auto-label-merge-conflicts@v2.0
+ # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
+ # Note, the authentication token is created automatically
+ # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
+ with:
+ CONFLICT_LABEL_NAME: 'merge conflict'
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ # Ignore errors
+ continue-on-error: true
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml
index 5b32fdec79..41ad956d82 100644
--- a/dspace-api/pom.xml
+++ b/dspace-api/pom.xml
@@ -325,6 +325,14 @@
apache-jena-libs
pom
+
+
+
+ org.glassfish.jersey.inject
+ jersey-hk2
+ ${jersey.version}
+
+
commons-cli
commons-cli
diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java
index 2e4f333820..3332440f06 100644
--- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java
+++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExport.java
@@ -10,10 +10,14 @@ package org.dspace.app.bulkedit;
import java.sql.SQLException;
import org.apache.commons.cli.ParseException;
+import org.apache.commons.lang3.StringUtils;
+import org.dspace.content.DSpaceObject;
+import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
+import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;
@@ -41,8 +45,7 @@ public class MetadataExport extends DSpaceRunnable {
+
+
+ @Override
+ public Options getOptions() {
+ Options options = super.getOptions();
+ options.addOption("f", "file", true, "destination where you want file written");
+ options.getOption("f").setType(OutputStream.class);
+ options.getOption("f").setRequired(true);
+ super.options = options;
+ return options;
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java
index 65c0ddd8cf..0c513c4667 100644
--- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java
@@ -7,7 +7,6 @@
*/
package org.dspace.app.bulkedit;
-import java.io.OutputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Options;
@@ -56,9 +55,6 @@ public class MetadataExportScriptConfiguration extends
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.getOption("i").setType(String.class);
- options.addOption("f", "file", true, "destination where you want file written");
- options.getOption("f").setType(OutputStream.class);
- options.getOption("f").setRequired(true);
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.getOption("a").setType(boolean.class);
diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java
index eb0a4e2935..67086c1536 100644
--- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java
+++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImport.java
@@ -182,24 +182,7 @@ public class MetadataImport extends DSpaceRunnable {
+
+ @Override
+ public Options getOptions() {
+ Options options = super.getOptions();
+ options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
+ options.getOption("e").setType(String.class);
+ options.getOption("e").setRequired(true);
+ super.options = options;
+ return options;
+ }
}
diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java
index 9ea50b7de5..07e6a9aec9 100644
--- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataImportScriptConfiguration.java
@@ -57,9 +57,6 @@ public class MetadataImportScriptConfiguration extends
options.addOption("f", "file", true, "source file");
options.getOption("f").setType(InputStream.class);
options.getOption("f").setRequired(true);
- options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
- options.getOption("e").setType(String.class);
- options.getOption("e").setRequired(true);
options.addOption("s", "silent", false,
"silent operation - doesn't request confirmation of changes USE WITH CAUTION");
options.getOption("s").setType(boolean.class);
diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java
index bb35cd3ff9..e2743951e7 100644
--- a/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java
+++ b/dspace-api/src/main/java/org/dspace/app/sitemap/GenerateSitemaps.java
@@ -27,6 +27,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
@@ -84,6 +85,9 @@ public class GenerateSitemaps {
options
.addOption("p", "ping", true,
"ping specified search engine URL");
+ options
+ .addOption("d", "delete", false,
+ "delete sitemaps dir and its contents");
CommandLine line = null;
@@ -105,10 +109,9 @@ public class GenerateSitemaps {
}
/*
- * Sanity check -- if no sitemap generation or pinging to do, print
- * usage
+ * Sanity check -- if there is no sitemap generation, deletion, or pinging to do, print usage
*/
- if (line.getArgs().length != 0 || line.hasOption('b')
+ if (line.getArgs().length != 0 || !line.hasOption('d') && line.hasOption('b')
&& line.hasOption('s') && !line.hasOption('g')
&& !line.hasOption('m') && !line.hasOption('y')
&& !line.hasOption('p')) {
@@ -123,6 +126,10 @@ public class GenerateSitemaps {
generateSitemaps(!line.hasOption('b'), !line.hasOption('s'));
}
+ if (line.hasOption('d')) {
+ deleteSitemaps();
+ }
+
if (line.hasOption('a')) {
pingConfiguredSearchEngines();
}
@@ -140,6 +147,29 @@ public class GenerateSitemaps {
System.exit(0);
}
+ /**
+ * Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml).
+ *
+ * @throws SQLException if a database error occurs.
+ * @throws IOException if IO error occurs.
+ */
+ public static void generateSitemapsScheduled() throws IOException, SQLException {
+ generateSitemaps(true, true);
+ }
+
+ /**
+ * Delete the sitemaps directory and its contents if it exists
+ * @throws IOException if IO error occurs
+ */
+ public static void deleteSitemaps() throws IOException {
+ File outputDir = new File(configurationService.getProperty("sitemap.dir"));
+ if (!outputDir.exists() || !outputDir.isDirectory()) {
+ log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
+ } else {
+ FileUtils.deleteDirectory(outputDir);
+ }
+ }
+
/**
* Generate sitemap.org protocol and/or basic HTML sitemaps.
*
@@ -150,14 +180,9 @@ public class GenerateSitemaps {
* @throws IOException if IO error
* if IO error occurs.
*/
- public static void generateSitemaps(boolean makeHTMLMap,
- boolean makeSitemapOrg) throws SQLException, IOException {
- String sitemapStem = configurationService.getProperty("dspace.ui.url")
- + "/sitemap";
- String htmlMapStem = configurationService.getProperty("dspace.ui.url")
- + "/htmlmap";
- String handleURLStem = configurationService.getProperty("dspace.ui.url")
- + "/handle/";
+ public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
+ String uiURLStem = configurationService.getProperty("dspace.ui.url");
+ String sitemapStem = uiURLStem + "/sitemap";
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
if (!outputDir.exists() && !outputDir.mkdir()) {
@@ -168,13 +193,11 @@ public class GenerateSitemaps {
AbstractGenerator sitemapsOrg = null;
if (makeHTMLMap) {
- html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=",
- null);
+ html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html");
}
if (makeSitemapOrg) {
- sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem
- + "?map=", null);
+ sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml");
}
Context c = new Context(Context.Mode.READ_ONLY);
@@ -182,7 +205,7 @@ public class GenerateSitemaps {
List comms = communityService.findAll(c);
for (Community comm : comms) {
- String url = handleURLStem + comm.getHandle();
+ String url = uiURLStem + "/communities/" + comm.getID();
if (makeHTMLMap) {
html.addURL(url, null);
@@ -197,7 +220,7 @@ public class GenerateSitemaps {
List colls = collectionService.findAll(c);
for (Collection coll : colls) {
- String url = handleURLStem + coll.getHandle();
+ String url = uiURLStem + "/collections/" + coll.getID();
if (makeHTMLMap) {
html.addURL(url, null);
@@ -214,7 +237,7 @@ public class GenerateSitemaps {
while (allItems.hasNext()) {
Item i = allItems.next();
- String url = handleURLStem + i.getHandle();
+ String url = uiURLStem + "/items/" + i.getID();
Date lastMod = i.getLastModified();
if (makeHTMLMap) {
diff --git a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java
index 9a0d5a6ba4..3ec4ca8239 100644
--- a/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java
+++ b/dspace-api/src/main/java/org/dspace/app/sitemap/SitemapsOrgGenerator.java
@@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
@Override
public String getFilename(int number) {
- return "sitemap" + number + ".xml.gz";
+ return "sitemap" + number + ".xml";
}
@Override
@@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator {
@Override
public boolean useCompression() {
- return true;
+ return false;
}
@Override
public String getIndexFilename() {
- return "sitemap_index.xml.gz";
+ return "sitemap_index.xml";
}
@Override
diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java
index 34bf4f5fc1..559b95edb8 100644
--- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java
@@ -57,7 +57,6 @@ import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
-import org.dspace.xmlworkflow.XmlWorkflowFactoryImpl;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.state.Workflow;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
@@ -387,7 +386,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i
log.error(LogManager.getHeader(context, "setWorkflowGroup",
"collection_id=" + collection.getID() + " " + e.getMessage()), e);
}
- if (!StringUtils.equals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID())) {
+ if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
throw new IllegalArgumentException(
"setWorkflowGroup can be used only on collection with the default basic dspace workflow. "
+ "Instead, the collection: "
diff --git a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java
index 6886d41e1b..d33ad7e416 100644
--- a/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/DSpaceObjectServiceImpl.java
@@ -207,8 +207,8 @@ public abstract class DSpaceObjectServiceImpl implements
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- List values) throws SQLException {
+ public List addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, List values) throws SQLException {
MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
if (metadataField == null) {
throw new SQLException(
@@ -216,12 +216,12 @@ public abstract class DSpaceObjectServiceImpl implements
"exist!");
}
- addMetadata(context, dso, metadataField, lang, values);
+ return addMetadata(context, dso, metadataField, lang, values);
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- List values, List authorities, List confidences)
+ public List addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, List values, List authorities, List confidences)
throws SQLException {
// We will not verify that they are valid entries in the registry
// until update() is called.
@@ -231,15 +231,16 @@ public abstract class DSpaceObjectServiceImpl implements
"bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not " +
"exist!");
}
- addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
+ return addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
}
@Override
- public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List values,
- List authorities, List confidences)
+ public List addMetadata(Context context, T dso, MetadataField metadataField, String lang,
+ List values, List authorities, List confidences)
throws SQLException {
boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField);
boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField);
+ List newMetadata = new ArrayList<>(values.size());
// We will not verify that they are valid entries in the registry
// until update() is called.
for (int i = 0; i < values.size(); i++) {
@@ -250,6 +251,7 @@ public abstract class DSpaceObjectServiceImpl implements
}
}
MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField);
+ newMetadata.add(metadataValue);
//Set place to list length of all metadatavalues for the given schema.element.qualifier combination.
// Subtract one to adhere to the 0 as first element rule
metadataValue.setPlace(
@@ -304,29 +306,31 @@ public abstract class DSpaceObjectServiceImpl implements
// metadataValueService.update(context, metadataValue);
dso.addDetails(metadataField.toString());
}
+ return newMetadata;
}
@Override
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
- String authority, int confidence) throws SQLException {
- addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
- Arrays.asList(confidence));
+ public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
+ String value, String authority, int confidence) throws SQLException {
+ return addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
+ Arrays.asList(confidence)).get(0);
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- String value) throws SQLException {
- addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value));
+ public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, String value) throws SQLException {
+ return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)).get(0);
}
@Override
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
+ public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
throws SQLException {
- addMetadata(context, dso, metadataField, language, Arrays.asList(value));
+ return addMetadata(context, dso, metadataField, language, Arrays.asList(value)).get(0);
}
@Override
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List values)
+ public List addMetadata(Context context, T dso, MetadataField metadataField, String language,
+ List values)
throws SQLException {
if (metadataField != null) {
String fieldKey = metadataAuthorityService
@@ -343,18 +347,19 @@ public abstract class DSpaceObjectServiceImpl implements
getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i);
}
}
- addMetadata(context, dso, metadataField, language, values, authorities, confidences);
+ return addMetadata(context, dso, metadataField, language, values, authorities, confidences);
} else {
- addMetadata(context, dso, metadataField, language, values, null, null);
+ return addMetadata(context, dso, metadataField, language, values, null, null);
}
}
+ return new ArrayList<>(0);
}
@Override
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- String value, String authority, int confidence) throws SQLException {
- addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), Arrays.asList(authority),
- Arrays.asList(confidence));
+ public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, String value, String authority, int confidence) throws SQLException {
+ return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value),
+ Arrays.asList(authority), Arrays.asList(confidence)).get(0);
}
@Override
@@ -660,33 +665,35 @@ public abstract class DSpaceObjectServiceImpl implements
@Override
public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier,
String lang, String value, String authority, int confidence, int index)
- throws SQLException {
+ throws SQLException {
List list = getMetadata(dso, schema, element, qualifier);
- clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
-
int idx = 0;
+ int place = 0;
boolean last = true;
for (MetadataValue rr : list) {
if (idx == index) {
- addMetadata(context, dso, schema, element, qualifier,
- lang, value, authority, confidence);
+ MetadataValue newMetadata = addMetadata(context, dso, schema, element, qualifier,
+ lang, value, authority, confidence);
+
+ moveSingleMetadataValue(context, dso, place, newMetadata);
+ place++;
last = false;
}
- addMetadata(context, dso, schema, element, qualifier,
- rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
+ moveSingleMetadataValue(context, dso, place, rr);
+ place++;
idx++;
}
if (last) {
addMetadata(context, dso, schema, element, qualifier,
- lang, value, authority, confidence);
+ lang, value, authority, confidence);
}
}
@Override
public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to)
- throws SQLException, IllegalArgumentException {
+ throws SQLException, IllegalArgumentException {
if (from == to) {
throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location");
@@ -701,8 +708,6 @@ public abstract class DSpaceObjectServiceImpl implements
"\n Idx from:" + from + " Idx to: " + to);
}
- clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
-
int idx = 0;
MetadataValue moved = null;
for (MetadataValue md : list) {
@@ -714,49 +719,46 @@ public abstract class DSpaceObjectServiceImpl implements
}
idx = 0;
+ int place = 0;
boolean last = true;
for (MetadataValue rr : list) {
if (idx == to && to < from) {
- addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
- moved.getAuthority(), moved.getConfidence());
+ moveSingleMetadataValue(context, dso, place, moved);
+ place++;
last = false;
}
if (idx != from) {
- addMetadata(context, dso, schema, element, qualifier, rr.getLanguage(), rr.getValue(),
- rr.getAuthority(), rr.getConfidence());
+ moveSingleMetadataValue(context, dso, place, rr);
+ place++;
}
if (idx == to && to > from) {
- addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
- moved.getAuthority(), moved.getConfidence());
+ moveSingleMetadataValue(context, dso, place, moved);
+ place++;
last = false;
}
idx++;
}
if (last) {
- addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
- moved.getAuthority(), moved.getConfidence());
+ moveSingleMetadataValue(context, dso, place, moved);
}
}
+ /**
+ * Supports moving metadata by updating the place of the metadata value
+ */
+ protected void moveSingleMetadataValue(Context context, T dso, int place, MetadataValue rr) {
+ //just move the metadata
+ rr.setPlace(place);
+ }
+
@Override
public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
String value, String authority, int confidence, int index) throws SQLException {
List list = getMetadata(dso, schema, element, qualifier);
- clearMetadata(context, dso, schema, element, qualifier, Item.ANY);
-
- int idx = 0;
- for (MetadataValue rr : list) {
- if (idx == index) {
- addMetadata(context, dso, schema, element, qualifier,
- lang, value, authority, confidence);
- } else {
- addMetadata(context, dso, schema, element, qualifier,
- rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
- }
- idx++;
- }
+ removeMetadataValues(context, dso, Arrays.asList(list.get(index)));
+ addAndShiftRightMetadata(context, dso, schema, element, qualifier, lang, value, authority, confidence, index);
}
@Override
diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java
index 9502a2ca32..00ab6df51e 100644
--- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java
@@ -1372,6 +1372,32 @@ prevent the generation of resource policy entry values with null dspace_object a
}
+ /**
+ * Supports moving metadata by adding the metadata value or updating the place of the relationship
+ */
+ @Override
+ protected void moveSingleMetadataValue(Context context, Item dso, int place, MetadataValue rr) {
+ if (rr instanceof RelationshipMetadataValue) {
+ try {
+ //Retrieve the applicable relationship
+ Relationship rs = relationshipService.find(context,
+ ((RelationshipMetadataValue) rr).getRelationshipId());
+ if (rs.getLeftItem() == dso) {
+ rs.setLeftPlace(place);
+ } else {
+ rs.setRightPlace(place);
+ }
+ relationshipService.update(context, rs);
+ } catch (Exception e) {
+ //should not occur, otherwise metadata can't be updated either
+ log.error("An error occurred while moving " + rr.getAuthority() + " for item " + dso.getID(), e);
+ }
+ } else {
+ //just move the metadata
+ rr.setPlace(place);
+ }
+ }
+
/**
* This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element,
* MetadataField Qualifier and MetadataField Place in that order.
diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java
index c71db2d131..569b5840c6 100644
--- a/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/MetadataFieldServiceImpl.java
@@ -9,6 +9,8 @@ package org.dspace.content;
import java.io.IOException;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
@@ -20,8 +22,12 @@ import org.dspace.content.dao.MetadataFieldDAO;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.service.MetadataValueService;
+import org.dspace.content.service.SiteService;
+import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
+import org.dspace.discovery.indexobject.IndexableMetadataField;
+import org.dspace.event.Event;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -46,6 +52,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
protected MetadataValueService metadataValueService;
@Autowired(required = true)
protected MetadataSchemaService metadataSchemaService;
+ @Autowired
+ protected SiteService siteService;
protected MetadataFieldServiceImpl() {
@@ -77,6 +85,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
log.info(LogManager.getHeader(context, "create_metadata_field",
"metadata_field_id=" + metadataField.getID()));
+ // Update the index of type metadatafield
+ this.triggerEventToUpdateIndex(context, metadataField.getID());
return metadataField;
}
@@ -149,6 +159,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
"metadata_field_id=" + metadataField.getID() + "element=" + metadataField
.getElement()
+ "qualifier=" + metadataField.getQualifier()));
+ // Update the index of type metadatafield
+ this.triggerEventToUpdateIndex(context, metadataField.getID());
}
@Override
@@ -177,6 +189,21 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
log.info(LogManager.getHeader(context, "delete_metadata_field",
"metadata_field_id=" + metadataField.getID()));
+ // Update the index of type metadatafield
+ this.triggerEventToUpdateIndex(context, metadataField.getID());
+ }
+
+ /**
+ * Calls a MODIFY SITE event with the identifier of the changed mdField, so it can be indexed in
+ * {@link org.dspace.discovery.IndexEventConsumer}, with type of {@link org.dspace.discovery.IndexableObject} in
+ * {@link Event}.detail and the identifiers of the changed mdFields in {@link Event}.identifiers
+ *
+ * @param context DSpace context
+ * @param mdFieldId ID of the metadata field that needs to be (re)indexed
+ */
+ private void triggerEventToUpdateIndex(Context context, int mdFieldId) {
+ context.addEvent(new Event(Event.MODIFY, Constants.SITE, null, IndexableMetadataField.TYPE, new ArrayList<>(
+ Arrays.asList(Integer.toString(mdFieldId)))));
}
/**
diff --git a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java
index 88d2e38beb..637d1c094b 100644
--- a/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java
+++ b/dspace-api/src/main/java/org/dspace/content/RelationshipMetadataValue.java
@@ -7,6 +7,8 @@
*/
package org.dspace.content;
+import org.dspace.core.Constants;
+
/**
* This class is used as a representation of MetadataValues for the MetadataValues that are derived from the
* Relationships that the item has. This includes the useForPlace property which we'll have to use to determine
@@ -57,4 +59,13 @@ public class RelationshipMetadataValue extends MetadataValue {
}
return super.equals(obj);
}
+
+ /**
+ * Retrieves the Relationship ID from which the current RelationshipMetadataValue is derived
+ *
+ * @return the relationship ID
+ */
+ public int getRelationshipId() {
+ return Integer.parseInt(getAuthority().substring(Constants.VIRTUAL_AUTHORITY_PREFIX.length()));
+ }
}
diff --git a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java
index c45f6c737c..0d5ba53794 100644
--- a/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java
+++ b/dspace-api/src/main/java/org/dspace/content/WorkspaceItemServiceImpl.java
@@ -265,7 +265,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
// Need to delete the workspaceitem row first since it refers
// to item ID
- workspaceItem.getSupervisorGroups().clear();
+ try {
+ workspaceItem.getSupervisorGroups().clear();
+ } catch (Exception e) {
+ log.error("failed to clear supervisor group", e);
+ }
+
workspaceItemDAO.delete(context, workspaceItem);
}
diff --git a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java
index 203d2a1787..ff44713b38 100644
--- a/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java
+++ b/dspace-api/src/main/java/org/dspace/content/service/DSpaceObjectService.java
@@ -200,10 +200,11 @@ public interface DSpaceObjectService {
* and the ISO3166 country code. null
means the
* value has no language (for example, a date).
* @param values the values to add.
+ * @return the list of MetadataValues added to the object
* @throws SQLException if database error
*/
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- List values) throws SQLException;
+ public List addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, List values) throws SQLException;
/**
* Add metadata fields. These are appended to existing values.
@@ -223,10 +224,11 @@ public interface DSpaceObjectService {
* @param values the values to add.
* @param authorities the external authority key for this value (or null)
* @param confidences the authority confidence (default 0)
+ * @return the list of MetadataValues added to the object
* @throws SQLException if database error
*/
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- List values, List authorities, List confidences)
+ public List addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, List values, List authorities, List confidences)
throws SQLException;
/**
@@ -243,32 +245,64 @@ public interface DSpaceObjectService {
* @param values the values to add.
* @param authorities the external authority key for this value (or null)
* @param confidences the authority confidence (default 0)
+ * @return the list of MetadataValues added to the object
* @throws SQLException if database error
*/
- public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List values,
- List authorities, List confidences) throws SQLException;
+ public List addMetadata(Context context, T dso, MetadataField metadataField, String lang,
+ List values, List authorities, List confidences) throws SQLException;
/**
* Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single
* value need to be added
- *
- * @param context
- * @param dso
- * @param metadataField
- * @param language
- * @param value
- * @param authority
- * @param confidence
+ *
+ * @param context DSpace context
+ * @param dso DSpaceObject
+ * @param metadataField the metadata field to which the value is to be set
+ * @param language the ISO639 language code, optionally followed by an underscore
+ * and the ISO3166 country code. null
means the
+ * value has no language (for example, a date).
+ * @param value the value to add.
+ * @param authority the external authority key for this value (or null)
+ * @param confidence the authority confidence (default 0)
+ * @return the MetadataValue added to the object
* @throws SQLException
*/
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
- String authority, int confidence) throws SQLException;
+ public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
+ String value, String authority, int confidence) throws SQLException;
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
+ /**
+ * Add a metadata field. These are appended to existing values.
+ * Use clearMetadata
to remove values.
+ *
+ * @param context DSpace context
+ * @param dso DSpaceObject
+ * @param metadataField the metadata field to which the value is to be set
+ * @param language the ISO639 language code, optionally followed by an underscore
+ * and the ISO3166 country code. null
means the
+ * value has no language (for example, a date).
+ * @param value the value to add.
+ * @return the MetadataValue added to the object
+ * @throws SQLException if database error
+ */
+ public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
throws SQLException;
- public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List values)
- throws SQLException;
+ /**
+ * Add metadata fields. These are appended to existing values.
+ * Use clearMetadata
to remove values.
+ *
+ * @param context DSpace context
+ * @param dso DSpaceObject
+ * @param metadataField the metadata field to which the value is to be set
+ * @param language the ISO639 language code, optionally followed by an underscore
+ * and the ISO3166 country code. null
means the
+ * value has no language (for example, a date).
+ * @param values the values to add.
+ * @return the list of MetadataValues added to the object
+ * @throws SQLException if database error
+ */
+ public List addMetadata(Context context, T dso, MetadataField metadataField, String language,
+ List values) throws SQLException;
/**
* Add a single metadata field. This is appended to existing
@@ -285,10 +319,11 @@ public interface DSpaceObjectService {
* and the ISO3166 country code. null
means the
* value has no language (for example, a date).
* @param value the value to add.
+ * @return the MetadataValue added to the object
* @throws SQLException if database error
*/
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- String value) throws SQLException;
+ public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, String value) throws SQLException;
/**
* Add a single metadata field. This is appended to existing
@@ -307,10 +342,11 @@ public interface DSpaceObjectService {
* @param value the value to add.
* @param authority the external authority key for this value (or null)
* @param confidence the authority confidence (default 0)
+ * @return the MetadataValue added to the object
* @throws SQLException if database error
*/
- public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
- String value, String authority, int confidence) throws SQLException;
+ public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
+ String lang, String value, String authority, int confidence) throws SQLException;
/**
* Clear metadata values. As with getDC
above,
diff --git a/dspace-api/src/main/java/org/dspace/curate/Curation.java b/dspace-api/src/main/java/org/dspace/curate/Curation.java
new file mode 100644
index 0000000000..44cbb24ed9
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/curate/Curation.java
@@ -0,0 +1,371 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.curate;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintStream;
+import java.io.Writer;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.output.NullOutputStream;
+import org.dspace.authorize.AuthorizeException;
+import org.dspace.content.DSpaceObject;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.core.Context;
+import org.dspace.core.factory.CoreServiceFactory;
+import org.dspace.curate.factory.CurateServiceFactory;
+import org.dspace.eperson.EPerson;
+import org.dspace.eperson.factory.EPersonServiceFactory;
+import org.dspace.eperson.service.EPersonService;
+import org.dspace.handle.factory.HandleServiceFactory;
+import org.dspace.handle.service.HandleService;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.utils.DSpace;
+
+/**
+ * CurationCli provides command-line access to Curation tools and processes.
+ *
+ * @author richardrodgers
+ */
+public class Curation extends DSpaceRunnable {
+
+ protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
+
+ protected Context context;
+ private CurationClientOptions curationClientOptions;
+
+ private String task;
+ private String taskFile;
+ private String id;
+ private String queue;
+ private String scope;
+ private String reporter;
+ private Map parameters;
+ private boolean verbose;
+
+ @Override
+ public void internalRun() throws Exception {
+ if (curationClientOptions == CurationClientOptions.HELP) {
+ printHelp();
+ return;
+ }
+
+ Curator curator = initCurator();
+
+ // load curation tasks
+ if (curationClientOptions == CurationClientOptions.TASK) {
+ long start = System.currentTimeMillis();
+ handleCurationTask(curator);
+ this.endScript(start);
+ }
+
+ // process task queue
+ if (curationClientOptions == CurationClientOptions.QUEUE) {
+ // process the task queue
+ TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
+ .getSinglePlugin(TaskQueue.class);
+ if (taskQueue == null) {
+ super.handler.logError("No implementation configured for queue");
+ throw new UnsupportedOperationException("No queue service available");
+ }
+ long timeRun = this.runQueue(taskQueue, curator);
+ this.endScript(timeRun);
+ }
+ }
+
+ /**
+ * Does the curation task (-t) or the task in the given file (-T).
+ * Checks:
+ * - if required option -i is missing.
+ * - if option -t has a valid task option
+ */
+ private void handleCurationTask(Curator curator) throws IOException, SQLException {
+ String taskName;
+ if (commandLine.hasOption('t')) {
+ if (verbose) {
+ handler.logInfo("Adding task: " + this.task);
+ }
+ curator.addTask(this.task);
+ if (verbose && !curator.hasTask(this.task)) {
+ handler.logInfo("Task: " + this.task + " not resolved");
+ }
+ } else if (commandLine.hasOption('T')) {
+ // load taskFile
+ BufferedReader reader = null;
+ try {
+ reader = new BufferedReader(new FileReader(this.taskFile));
+ while ((taskName = reader.readLine()) != null) {
+ if (verbose) {
+ super.handler.logInfo("Adding task: " + taskName);
+ }
+ curator.addTask(taskName);
+ }
+ } finally {
+ if (reader != null) {
+ reader.close();
+ }
+ }
+ }
+ // run tasks against object
+ if (verbose) {
+ super.handler.logInfo("Starting curation");
+ super.handler.logInfo("Curating id: " + this.id);
+ }
+ if ("all".equals(this.id)) {
+ // run on whole Site
+ curator.curate(context,
+ ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
+ } else {
+ curator.curate(context, this.id);
+ }
+ }
+
+ /**
+ * Runs task queue (-q set)
+ *
+ * @param queue The task queue
+ * @param curator The curator
+ * @return Time when queue started
+ */
+ private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
+ // use current time as our reader 'ticket'
+ long ticket = System.currentTimeMillis();
+ Iterator entryIter = queue.dequeue(this.queue, ticket).iterator();
+ while (entryIter.hasNext()) {
+ TaskQueueEntry entry = entryIter.next();
+ if (verbose) {
+ super.handler.logInfo("Curating id: " + entry.getObjectId());
+ }
+ curator.clear();
+ // does entry relate to a DSO or workflow object?
+ if (entry.getObjectId().indexOf('/') > 0) {
+ for (String taskName : entry.getTaskNames()) {
+ curator.addTask(taskName);
+ }
+ curator.curate(context, entry.getObjectId());
+ } else {
+ // make eperson who queued task the effective user
+ EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
+ if (agent != null) {
+ context.setCurrentUser(agent);
+ }
+ CurateServiceFactory.getInstance().getWorkflowCuratorService()
+ .curate(curator, context, entry.getObjectId());
+ }
+ }
+ queue.release(this.queue, ticket, true);
+ return ticket;
+ }
+
+ /**
+ * End of curation script; logs script time if -v verbose is set
+ *
+ * @param timeRun Time script was started
+ * @throws SQLException If DSpace context can't complete
+ */
+ private void endScript(long timeRun) throws SQLException {
+ context.complete();
+ if (verbose) {
+ long elapsed = System.currentTimeMillis() - timeRun;
+ this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
+ }
+ }
+
+ /**
+ * Initialize the curator with command line variables
+ *
+ * @return Initialised curator
+ * @throws FileNotFoundException If file of command line variable -r reporter is not found
+ */
+ private Curator initCurator() throws FileNotFoundException {
+ Curator curator = new Curator();
+ OutputStream reporterStream;
+ if (null == this.reporter) {
+ reporterStream = new NullOutputStream();
+ } else if ("-".equals(this.reporter)) {
+ reporterStream = System.out;
+ } else {
+ reporterStream = new PrintStream(this.reporter);
+ }
+ Writer reportWriter = new OutputStreamWriter(reporterStream);
+ curator.setReporter(reportWriter);
+
+ if (this.scope != null) {
+ Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
+ curator.setTransactionScope(txScope);
+ }
+
+ curator.addParameters(parameters);
+ // we are operating in batch mode, if anyone cares.
+ curator.setInvoked(Curator.Invoked.BATCH);
+ return curator;
+ }
+
+ @Override
+ public void printHelp() {
+ super.printHelp();
+ super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
+ super.handler.logInfo("single item: CurationCli -t generate -i itemId");
+ super.handler.logInfo("task queue: CurationCli -q monthly");
+ }
+
+ @Override
+ public CurationScriptConfiguration getScriptConfiguration() {
+ return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
+ }
+
+ @Override
+ public void setup() throws ParseException {
+ assignCurrentUserInContext();
+ this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
+
+ if (this.curationClientOptions != null) {
+ this.initGeneralLineOptionsAndCheckIfValid();
+ if (curationClientOptions == CurationClientOptions.TASK) {
+ this.initTaskLineOptionsAndCheckIfValid();
+ } else if (curationClientOptions == CurationClientOptions.QUEUE) {
+ this.queue = this.commandLine.getOptionValue('q');
+ }
+ } else {
+ throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
+ " specified");
+ }
+ }
+
+ /**
+ * This method will assign the currentUser to the {@link Context} variable which is also created in this method.
+ * The instance of the method in this class will fetch the EPersonIdentifier from this class, this identifier
+ * was given to this class upon instantiation, it'll then be used to find the {@link EPerson} associated with it
+ * and this {@link EPerson} will be set as the currentUser of the created {@link Context}
+ * @throws ParseException If something went wrong with the retrieval of the EPerson Identifier
+ */
+ protected void assignCurrentUserInContext() throws ParseException {
+ UUID currentUserUuid = this.getEpersonIdentifier();
+ try {
+ this.context = new Context(Context.Mode.BATCH_EDIT);
+ EPerson eperson = ePersonService.find(context, currentUserUuid);
+ if (eperson == null) {
+ super.handler.logError("EPerson not found: " + currentUserUuid);
+ throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
+ }
+ this.context.setCurrentUser(eperson);
+ } catch (SQLException e) {
+ handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
+ }
+ }
+
+ /**
+ * Fills in some optional command line options.
+ * Checks if there are missing required options or invalid values for options.
+ */
+ private void initGeneralLineOptionsAndCheckIfValid() {
+ // report file
+ if (this.commandLine.hasOption('r')) {
+ this.reporter = this.commandLine.getOptionValue('r');
+ }
+
+ // parameters
+ this.parameters = new HashMap<>();
+ if (this.commandLine.hasOption('p')) {
+ for (String parameter : this.commandLine.getOptionValues('p')) {
+ String[] parts = parameter.split("=", 2);
+ String name = parts[0].trim();
+ String value;
+ if (parts.length > 1) {
+ value = parts[1].trim();
+ } else {
+ value = "true";
+ }
+ this.parameters.put(name, value);
+ }
+ }
+
+ // verbose
+ verbose = false;
+ if (commandLine.hasOption('v')) {
+ verbose = true;
+ }
+
+ // scope
+ if (this.commandLine.getOptionValue('s') != null) {
+ this.scope = this.commandLine.getOptionValue('s');
+ if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
+ this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
+ "'open' recognized");
+ throw new IllegalArgumentException(
+ "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
+ "'open' recognized");
+ }
+ }
+ }
+
+ /**
+ * Fills in required command line options for the task or taskFile option.
+ * Checks if there is a missing required -i option and if -i is either 'all' or a valid dso handle.
+ * Checks if -t task has a valid task option.
+ * Checks if -T taskfile is a valid file.
+ */
+ private void initTaskLineOptionsAndCheckIfValid() {
+ // task or taskFile
+ if (this.commandLine.hasOption('t')) {
+ this.task = this.commandLine.getOptionValue('t');
+ if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
+ super.handler
+ .logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
+ throw new IllegalArgumentException(
+ "-t task must be one of: " + CurationClientOptions.getTaskOptions());
+ }
+ } else if (this.commandLine.hasOption('T')) {
+ this.taskFile = this.commandLine.getOptionValue('T');
+ if (!(new File(this.taskFile).isFile())) {
+ super.handler
+ .logError("-T taskFile must be valid file: " + this.taskFile);
+ throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
+ }
+ }
+
+ if (this.commandLine.hasOption('i')) {
+ this.id = this.commandLine.getOptionValue('i').toLowerCase();
+ if (!this.id.equalsIgnoreCase("all")) {
+ HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
+ DSpaceObject dso;
+ try {
+ dso = handleService.resolveToObject(this.context, id);
+ } catch (SQLException e) {
+ super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
+ throw new IllegalArgumentException(
+ "SQLException trying to resolve handle " + id + " to a valid dso");
+ }
+ if (dso == null) {
+ super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
+ "not be resolved to valid dso handle");
+ throw new IllegalArgumentException(
+ "Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
+ "not be resolved to valid dso handle");
+ }
+ }
+ } else {
+ super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
+ "help)");
+ throw new IllegalArgumentException(
+ "Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
+ "help)");
+ }
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCli.java b/dspace-api/src/main/java/org/dspace/curate/CurationCli.java
index 8f5d91cc1c..f70aea5b1d 100644
--- a/dspace-api/src/main/java/org/dspace/curate/CurationCli.java
+++ b/dspace-api/src/main/java/org/dspace/curate/CurationCli.java
@@ -7,229 +7,27 @@
*/
package org.dspace.curate;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.PrintStream;
-import java.io.Writer;
import java.sql.SQLException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import org.apache.commons.io.output.NullOutputStream;
-import org.dspace.authorize.AuthorizeException;
-import org.dspace.content.DSpaceObject;
-import org.dspace.content.factory.ContentServiceFactory;
+import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
-import org.dspace.core.factory.CoreServiceFactory;
-import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson;
-import org.dspace.eperson.factory.EPersonServiceFactory;
-import org.dspace.eperson.service.EPersonService;
-import org.dspace.handle.factory.HandleServiceFactory;
-import org.dspace.handle.service.HandleService;
-import org.dspace.scripts.DSpaceRunnable;
-import org.dspace.utils.DSpace;
/**
- * CurationCli provides command-line access to Curation tools and processes.
- *
- * @author richardrodgers
+ * This is the CLI version of the {@link Curation} script.
+ * This will only be called when the curate script is called from a commandline instance.
*/
-public class CurationCli extends DSpaceRunnable {
-
- private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
-
- private Context context;
- private CurationClientOptions curationClientOptions;
-
- private String task;
- private String taskFile;
- private String id;
- private String queue;
- private String scope;
- private String reporter;
- private Map parameters;
- private boolean verbose;
-
- @Override
- public void internalRun() throws Exception {
- if (curationClientOptions == CurationClientOptions.HELP) {
- printHelp();
- return;
- }
-
- Curator curator = initCurator();
-
- // load curation tasks
- if (curationClientOptions == CurationClientOptions.TASK) {
- long start = System.currentTimeMillis();
- handleCurationTask(curator);
- this.endScript(start);
- }
-
- // process task queue
- if (curationClientOptions == CurationClientOptions.QUEUE) {
- // process the task queue
- TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
- .getSinglePlugin(TaskQueue.class);
- if (taskQueue == null) {
- super.handler.logError("No implementation configured for queue");
- throw new UnsupportedOperationException("No queue service available");
- }
- long timeRun = this.runQueue(taskQueue, curator);
- this.endScript(timeRun);
- }
- }
+public class CurationCli extends Curation {
/**
- * Does the curation task (-t) or the task in the given file (-T).
- * Checks:
- * - if required option -i is missing.
- * - if option -t has a valid task option
+ * This is the overridden instance of the {@link Curation#assignCurrentUserInContext()} method in the parent class
+ * {@link Curation}.
+ * This is done so that the CLI version of the Script is able to retrieve its currentUser from the -e flag given
+ * with the parameters of the Script.
+ * @throws ParseException If the e flag was not given to the parameters when calling the script
*/
- private void handleCurationTask(Curator curator) throws IOException, SQLException {
- String taskName;
- if (commandLine.hasOption('t')) {
- if (verbose) {
- handler.logInfo("Adding task: " + this.task);
- }
- curator.addTask(this.task);
- if (verbose && !curator.hasTask(this.task)) {
- handler.logInfo("Task: " + this.task + " not resolved");
- }
- } else if (commandLine.hasOption('T')) {
- // load taskFile
- BufferedReader reader = null;
- try {
- reader = new BufferedReader(new FileReader(this.taskFile));
- while ((taskName = reader.readLine()) != null) {
- if (verbose) {
- super.handler.logInfo("Adding task: " + taskName);
- }
- curator.addTask(taskName);
- }
- } finally {
- if (reader != null) {
- reader.close();
- }
- }
- }
- // run tasks against object
- if (verbose) {
- super.handler.logInfo("Starting curation");
- super.handler.logInfo("Curating id: " + this.id);
- }
- if ("all".equals(this.id)) {
- // run on whole Site
- curator.curate(context,
- ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
- } else {
- curator.curate(context, this.id);
- }
- }
-
- /**
- * Runs task queue (-q set)
- *
- * @param queue The task queue
- * @param curator The curator
- * @return Time when queue started
- */
- private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
- // use current time as our reader 'ticket'
- long ticket = System.currentTimeMillis();
- Iterator entryIter = queue.dequeue(this.queue, ticket).iterator();
- while (entryIter.hasNext()) {
- TaskQueueEntry entry = entryIter.next();
- if (verbose) {
- super.handler.logInfo("Curating id: " + entry.getObjectId());
- }
- curator.clear();
- // does entry relate to a DSO or workflow object?
- if (entry.getObjectId().indexOf('/') > 0) {
- for (String taskName : entry.getTaskNames()) {
- curator.addTask(taskName);
- }
- curator.curate(context, entry.getObjectId());
- } else {
- // make eperson who queued task the effective user
- EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
- if (agent != null) {
- context.setCurrentUser(agent);
- }
- CurateServiceFactory.getInstance().getWorkflowCuratorService()
- .curate(curator, context, entry.getObjectId());
- }
- }
- queue.release(this.queue, ticket, true);
- return ticket;
- }
-
- /**
- * End of curation script; logs script time if -v verbose is set
- *
- * @param timeRun Time script was started
- * @throws SQLException If DSpace contextx can't complete
- */
- private void endScript(long timeRun) throws SQLException {
- context.complete();
- if (verbose) {
- long elapsed = System.currentTimeMillis() - timeRun;
- this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
- }
- }
-
- /**
- * Initialize the curator with command line variables
- *
- * @return Initialised curator
- * @throws FileNotFoundException If file of command line variable -r reporter is not found
- */
- private Curator initCurator() throws FileNotFoundException {
- Curator curator = new Curator();
- OutputStream reporterStream;
- if (null == this.reporter) {
- reporterStream = new NullOutputStream();
- } else if ("-".equals(this.reporter)) {
- reporterStream = System.out;
- } else {
- reporterStream = new PrintStream(this.reporter);
- }
- Writer reportWriter = new OutputStreamWriter(reporterStream);
- curator.setReporter(reportWriter);
-
- if (this.scope != null) {
- Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
- curator.setTransactionScope(txScope);
- }
-
- curator.addParameters(parameters);
- // we are operating in batch mode, if anyone cares.
- curator.setInvoked(Curator.Invoked.BATCH);
- return curator;
- }
-
@Override
- public void printHelp() {
- super.printHelp();
- super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
- super.handler.logInfo("single item: CurationCli -t generate -i itemId");
- super.handler.logInfo("task queue: CurationCli -q monthly");
- }
-
- @Override
- public CurationScriptConfiguration getScriptConfiguration() {
- return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
- }
-
- @Override
- public void setup() {
+ protected void assignCurrentUserInContext() throws ParseException {
if (this.commandLine.hasOption('e')) {
String ePersonEmail = this.commandLine.getOptionValue('e');
this.context = new Context(Context.Mode.BATCH_EDIT);
@@ -244,119 +42,7 @@ public class CurationCli extends DSpaceRunnable {
throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
}
} else {
- throw new IllegalArgumentException("Needs an -e to set eperson (admin)");
- }
- this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);
-
- if (this.curationClientOptions != null) {
- this.initGeneralLineOptionsAndCheckIfValid();
- if (curationClientOptions == CurationClientOptions.TASK) {
- this.initTaskLineOptionsAndCheckIfValid();
- } else if (curationClientOptions == CurationClientOptions.QUEUE) {
- this.queue = this.commandLine.getOptionValue('q');
- }
- } else {
- throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
- " specified");
- }
- }
-
- /**
- * Fills in some optional command line options.
- * Checks if there are missing required options or invalid values for options.
- */
- private void initGeneralLineOptionsAndCheckIfValid() {
- // report file
- if (this.commandLine.hasOption('r')) {
- this.reporter = this.commandLine.getOptionValue('r');
- }
-
- // parameters
- this.parameters = new HashMap<>();
- if (this.commandLine.hasOption('p')) {
- for (String parameter : this.commandLine.getOptionValues('p')) {
- String[] parts = parameter.split("=", 2);
- String name = parts[0].trim();
- String value;
- if (parts.length > 1) {
- value = parts[1].trim();
- } else {
- value = "true";
- }
- this.parameters.put(name, value);
- }
- }
-
- // verbose
- verbose = false;
- if (commandLine.hasOption('v')) {
- verbose = true;
- }
-
- // scope
- if (this.commandLine.getOptionValue('s') != null) {
- this.scope = this.commandLine.getOptionValue('s');
- if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
- this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
- "'open' recognized");
- throw new IllegalArgumentException(
- "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
- "'open' recognized");
- }
- }
- }
-
- /**
- * Fills in required command line options for the task or taskFile option.
- * Checks if there are is a missing required -i option and if -i is either 'all' or a valid dso handle.
- * Checks if -t task has a valid task option.
- * Checks if -T taskfile is a valid file.
- */
- private void initTaskLineOptionsAndCheckIfValid() {
- // task or taskFile
- if (this.commandLine.hasOption('t')) {
- this.task = this.commandLine.getOptionValue('t');
- if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
- super.handler
- .logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
- throw new IllegalArgumentException(
- "-t task must be one of: " + CurationClientOptions.getTaskOptions());
- }
- } else if (this.commandLine.hasOption('T')) {
- this.taskFile = this.commandLine.getOptionValue('T');
- if (!(new File(this.taskFile).isFile())) {
- super.handler
- .logError("-T taskFile must be valid file: " + this.taskFile);
- throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
- }
- }
-
- if (this.commandLine.hasOption('i')) {
- this.id = this.commandLine.getOptionValue('i').toLowerCase();
- if (!this.id.equalsIgnoreCase("all")) {
- HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
- DSpaceObject dso;
- try {
- dso = handleService.resolveToObject(this.context, id);
- } catch (SQLException e) {
- super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
- throw new IllegalArgumentException(
- "SQLException trying to resolve handle " + id + " to a valid dso");
- }
- if (dso == null) {
- super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
- "not be resolved to valid dso handle");
- throw new IllegalArgumentException(
- "Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
- "not be resolved to valid dso handle");
- }
- }
- } else {
- super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
- "help)");
- throw new IllegalArgumentException(
- "Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
- "help)");
+ throw new ParseException("Required parameter -e missing!");
}
}
}
diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java
new file mode 100644
index 0000000000..5e1d014873
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/curate/CurationCliScriptConfiguration.java
@@ -0,0 +1,26 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.curate;
+
+import org.apache.commons.cli.Options;
+
+/**
+ * This is the CLI version of the {@link CurationScriptConfiguration} class that handles the configuration for the
+ * {@link CurationCli} script
+ */
+public class CurationCliScriptConfiguration extends CurationScriptConfiguration {
+
+ @Override
+ public Options getOptions() {
+ options = super.getOptions();
+ options.addOption("e", "eperson", true, "email address of curating eperson");
+ options.getOption("e").setType(String.class);
+ options.getOption("e").setRequired(true);
+ return options;
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java b/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java
index 7daf107aad..8ec0f14697 100644
--- a/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java
+++ b/dspace-api/src/main/java/org/dspace/curate/CurationClientOptions.java
@@ -45,6 +45,11 @@ public enum CurationClientOptions {
return null;
}
+ /**
+ * This method will create all the possible Options for the {@link Curation} script.
+ * This will be used by {@link CurationScriptConfiguration}
+ * @return The options for the {@link Curation} script
+ */
protected static Options constructOptions() {
Options options = new Options();
@@ -54,7 +59,6 @@ public enum CurationClientOptions {
"Id (handle) of object to perform task on, or 'all' to perform on whole repository");
options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
options.addOption("q", "queue", true, "name of task queue to process");
- options.addOption("e", "eperson", true, "email address of curating eperson");
options.addOption("r", "reporter", true,
"relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
"reporting");
diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java
index 785926908e..fefb4eb768 100644
--- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java
@@ -16,11 +16,11 @@ import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/**
- * The {@link ScriptConfiguration} for the {@link CurationCli} script
+ * The {@link ScriptConfiguration} for the {@link Curation} script
*
* @author Maria Verdonck (Atmire) on 23/06/2020
*/
-public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> {
+public class CurationScriptConfiguration<T extends Curation> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java
index 43ea9eefb2..195c9cd6fc 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/IndexEventConsumer.java
@@ -8,6 +8,7 @@
package org.dspace.discovery;
import java.util.HashSet;
+import java.util.Optional;
import java.util.Set;
import org.apache.logging.log4j.Logger;
@@ -15,6 +16,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
+import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
@@ -67,7 +69,7 @@ public class IndexEventConsumer implements Consumer {
int st = event.getSubjectType();
if (!(st == Constants.ITEM || st == Constants.BUNDLE
- || st == Constants.COLLECTION || st == Constants.COMMUNITY)) {
+ || st == Constants.COLLECTION || st == Constants.COMMUNITY || st == Constants.SITE)) {
log
.warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: "
+ event.toString());
@@ -104,10 +106,28 @@ public class IndexEventConsumer implements Consumer {
case Event.MODIFY:
case Event.MODIFY_METADATA:
if (subject == null) {
- log.warn(event.getEventTypeAsString() + " event, could not get object for "
+ if (st == Constants.SITE) {
+ // Update the indexable objects of type in event.detail of objects with ids in event.identifiers
+ for (String id : event.getIdentifiers()) {
+ IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance().
+ getIndexFactoryByType(event.getDetail());
+                            Optional<IndexableObject> indexableObject = Optional.empty();
+ indexableObject = indexableObjectService.findIndexableObject(ctx, id);
+ if (indexableObject.isPresent()) {
+ log.debug("consume() adding event to update queue: " + event.toString());
+ objectsToUpdate
+ .addAll(indexObjectServiceFactory
+ .getIndexableObjects(ctx, indexableObject.get().getIndexedObject()));
+ } else {
+ log.warn("Cannot resolve " + id);
+ }
+ }
+ } else {
+ log.warn(event.getEventTypeAsString() + " event, could not get object for "
+ event.getSubjectTypeAsString() + " id="
+ event.getSubjectID()
+ ", perhaps it has been deleted.");
+ }
} else {
log.debug("consume() adding event to update queue: " + event.toString());
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java
index ca1423e593..2e4eb67723 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java
@@ -12,6 +12,7 @@ import java.sql.SQLException;
import java.util.Date;
import java.util.List;
+import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
@@ -56,7 +57,7 @@ public abstract class IndexFactoryImpl implements
doc.addField(SearchUtils.RESOURCE_ID_FIELD, indexableObject.getID().toString());
//Do any additional indexing, depends on the plugins
- for (SolrServiceIndexPlugin solrServiceIndexPlugin : solrServiceIndexPlugins) {
+ for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) {
solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc);
}
@@ -190,4 +191,4 @@ public abstract class IndexFactoryImpl implements
public void deleteAll() throws IOException, SolrServerException {
solrSearchCore.getSolr().deleteByQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + getType());
}
-}
\ No newline at end of file
+}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java
new file mode 100644
index 0000000000..ed44e8eebe
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexableMetadataField.java
@@ -0,0 +1,51 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.discovery.indexobject;
+
+import org.dspace.content.MetadataField;
+import org.dspace.discovery.IndexableObject;
+
+/**
+ * {@link MetadataField} implementation for the {@link IndexableObject}
+ *
+ * @author Maria Verdonck (Atmire) on 14/07/2020
+ */
+public class IndexableMetadataField implements IndexableObject<MetadataField, Integer> {
+
+ private MetadataField metadataField;
+ public static final String TYPE = MetadataField.class.getSimpleName();
+
+ public IndexableMetadataField(MetadataField metadataField) {
+ this.metadataField = metadataField;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ @Override
+ public Integer getID() {
+ return this.metadataField.getID();
+ }
+
+ @Override
+ public MetadataField getIndexedObject() {
+ return this.metadataField;
+ }
+
+ @Override
+ public void setIndexedObject(MetadataField metadataField) {
+ this.metadataField = metadataField;
+ }
+
+ @Override
+ public String getTypeText() {
+ return TYPE.toUpperCase();
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java
new file mode 100644
index 0000000000..518a8ff145
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java
@@ -0,0 +1,109 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.discovery.indexobject;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.solr.common.SolrInputDocument;
+import org.dspace.content.MetadataField;
+import org.dspace.content.service.MetadataFieldService;
+import org.dspace.core.Context;
+import org.dspace.discovery.indexobject.factory.MetadataFieldIndexFactory;
+import org.dspace.eperson.Group;
+import org.dspace.eperson.factory.EPersonServiceFactory;
+import org.dspace.eperson.service.GroupService;
+import org.springframework.beans.factory.annotation.Autowired;
+
+/**
+ * Factory implementation for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
+ *
+ * @author Maria Verdonck (Atmire) on 14/07/2020
+ */
+public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl<IndexableMetadataField, MetadataField>
+    implements MetadataFieldIndexFactory {
+
+ public static final String SCHEMA_FIELD_NAME = "schema";
+ public static final String ELEMENT_FIELD_NAME = "element";
+ public static final String QUALIFIER_FIELD_NAME = "qualifier";
+ public static final String FIELD_NAME_VARIATIONS = "fieldName";
+
+ protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
+
+ @Override
+ public SolrInputDocument buildDocument(Context context, IndexableMetadataField indexableObject) throws SQLException,
+ IOException {
+ // Add the ID's, types and call the SolrServiceIndexPlugins
+ final SolrInputDocument doc = super.buildDocument(context, indexableObject);
+ final MetadataField metadataField = indexableObject.getIndexedObject();
+ // add schema, element, qualifier and full fieldName
+ addFacetIndex(doc, SCHEMA_FIELD_NAME, metadataField.getMetadataSchema().getName(),
+ metadataField.getMetadataSchema().getName());
+ addFacetIndex(doc, ELEMENT_FIELD_NAME, metadataField.getElement(), metadataField.getElement());
+ String fieldName = metadataField.toString().replace('_', '.');
+ addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName, fieldName);
+ if (StringUtils.isNotBlank(metadataField.getQualifier())) {
+ addFacetIndex(doc, QUALIFIER_FIELD_NAME, metadataField.getQualifier(), metadataField.getQualifier());
+ addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName,
+ metadataField.getElement() + "." + metadataField.getQualifier());
+ addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getQualifier(), metadataField.getQualifier());
+ } else {
+ addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getElement(), metadataField.getElement());
+ }
+ addNamedResourceTypeIndex(doc, indexableObject.getTypeText());
+ Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
+ // add read permission on doc for anonymous group
+ doc.addField("read", "g" + anonymousGroup.getID());
+ return doc;
+ }
+
+ @Autowired
+ private MetadataFieldService metadataFieldService;
+
+ @Override
+    public Iterator<IndexableMetadataField> findAll(Context context) throws SQLException {
+        final Iterator<MetadataField> metadataFields = metadataFieldService.findAll(context).iterator();
+ return new Iterator<>() {
+ @Override
+ public boolean hasNext() {
+ return metadataFields.hasNext();
+ }
+
+ @Override
+ public IndexableMetadataField next() {
+ return new IndexableMetadataField(metadataFields.next());
+ }
+ };
+ }
+
+ @Override
+ public String getType() {
+ return IndexableMetadataField.TYPE;
+ }
+
+ @Override
+    public Optional<IndexableMetadataField> findIndexableObject(Context context, String id) throws SQLException {
+ final MetadataField metadataField = metadataFieldService.find(context, Integer.parseInt(id));
+ return metadataField == null ? Optional.empty() : Optional.of(new IndexableMetadataField(metadataField));
+ }
+
+ @Override
+ public boolean supports(Object object) {
+ return object instanceof MetadataField;
+ }
+
+ @Override
+    public List<IndexableMetadataField> getIndexableObjects(Context context, MetadataField object) {
+ return Arrays.asList(new IndexableMetadataField(object));
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java
new file mode 100644
index 0000000000..976cc4511c
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/factory/MetadataFieldIndexFactory.java
@@ -0,0 +1,19 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.discovery.indexobject.factory;
+
+import org.dspace.content.MetadataField;
+import org.dspace.discovery.indexobject.IndexableMetadataField;
+
+/**
+ * Factory interface for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
+ *
+ * @author Maria Verdonck (Atmire) on 14/07/2020
+ */
+public interface MetadataFieldIndexFactory extends IndexFactory<IndexableMetadataField, MetadataField> {
+}
diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java
index fc2950ee2b..3c48a5244a 100644
--- a/dspace-api/src/main/java/org/dspace/eperson/EPerson.java
+++ b/dspace-api/src/main/java/org/dspace/eperson/EPerson.java
@@ -141,7 +141,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport {
return false;
}
final EPerson other = (EPerson) obj;
- if (this.getID() != other.getID()) {
+ if (!this.getID().equals(other.getID())) {
return false;
}
if (!StringUtils.equals(this.getEmail(), other.getEmail())) {
diff --git a/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java
new file mode 100644
index 0000000000..45855a74ad
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java
@@ -0,0 +1,162 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.external.provider.impl;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.dspace.content.dto.MetadataValueDTO;
+import org.dspace.external.model.ExternalDataObject;
+import org.dspace.external.provider.ExternalDataProvider;
+import org.dspace.importer.external.datamodel.ImportRecord;
+import org.dspace.importer.external.exception.MetadataSourceException;
+import org.dspace.importer.external.metadatamapping.MetadatumDTO;
+import org.dspace.importer.external.service.components.QuerySource;
+
+/**
+ * This class allows to configure a Live Import Provider as an External Data Provider
+ *
+ * @author Andrea Bollini (andrea.bollini at 4science.it)
+ *
+ */
+public class LiveImportDataProvider implements ExternalDataProvider {
+ /**
+ * The {@link QuerySource} live import provider
+ */
+ private QuerySource querySource;
+
+ /**
+ * An unique human readable identifier for this provider
+ */
+ private String sourceIdentifier;
+
+ private String recordIdMetadata;
+
+ private String displayMetadata = "dc.title";
+
+ @Override
+ public String getSourceIdentifier() {
+ return sourceIdentifier;
+ }
+
+ /**
+ * This method set the SourceIdentifier for the ExternalDataProvider
+ * @param sourceIdentifier The UNIQUE sourceIdentifier to be set on any LiveImport data provider
+ */
+ public void setSourceIdentifier(String sourceIdentifier) {
+ this.sourceIdentifier = sourceIdentifier;
+ }
+
+ /**
+ * This method set the MetadataSource for the ExternalDataProvider
+ * @param metadataSource {@link org.dspace.importer.external.service.components.MetadataSource} implementation used to process the input data
+ */
+ public void setMetadataSource(QuerySource querySource) {
+ this.querySource = querySource;
+ }
+
+ /**
+ * This method set dublin core identifier to use as metadata id
+ * @param recordIdMetadata dublin core identifier to use as metadata id
+ */
+ public void setRecordIdMetadata(String recordIdMetadata) {
+ this.recordIdMetadata = recordIdMetadata;
+ }
+
+ /**
+ * This method set the dublin core identifier to display the title
+ * @param displayMetadata metadata to use as title
+ */
+ public void setDisplayMetadata(String displayMetadata) {
+ this.displayMetadata = displayMetadata;
+ }
+
+ @Override
+    public Optional<ExternalDataObject> getExternalDataObject(String id) {
+ try {
+ ExternalDataObject externalDataObject = getExternalDataObject(querySource.getRecord(id));
+ return Optional.of(externalDataObject);
+ } catch (MetadataSourceException e) {
+ throw new RuntimeException(
+ "The live import provider " + querySource.getImportSource() + " throws an exception", e);
+ }
+ }
+
+ @Override
+    public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
+        Collection<ImportRecord> records;
+ try {
+ records = querySource.getRecords(query, start, limit);
+ return records.stream().map(r -> getExternalDataObject(r)).collect(Collectors.toList());
+ } catch (MetadataSourceException e) {
+ throw new RuntimeException(
+ "The live import provider " + querySource.getImportSource() + " throws an exception", e);
+ }
+ }
+
+ @Override
+ public boolean supports(String source) {
+ return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
+ }
+
+ @Override
+ public int getNumberOfResults(String query) {
+ try {
+ return querySource.getRecordsCount(query);
+ } catch (MetadataSourceException e) {
+ throw new RuntimeException(
+ "The live import provider " + querySource.getImportSource() + " throws an exception", e);
+ }
+ }
+
+ /**
+ * Internal method to convert an ImportRecord to an ExternalDataObject
+ *
+ * FIXME it would be useful to remove ImportRecord at all in favor of the
+ * ExternalDataObject
+ *
+ * @param record
+ * @return
+ */
+ private ExternalDataObject getExternalDataObject(ImportRecord record) {
+ //return 400 if no record were found
+ if (record == null) {
+ throw new IllegalArgumentException("No record found for query or id");
+ }
+ ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier);
+ String id = getFirstValue(record, recordIdMetadata);
+ String display = getFirstValue(record, displayMetadata);
+ externalDataObject.setId(id);
+ externalDataObject.setDisplayValue(display);
+ externalDataObject.setValue(display);
+ for (MetadatumDTO dto : record.getValueList()) {
+ // FIXME it would be useful to remove MetadatumDTO in favor of MetadataValueDTO
+ MetadataValueDTO mvDTO = new MetadataValueDTO();
+ mvDTO.setSchema(dto.getSchema());
+ mvDTO.setElement(dto.getElement());
+ mvDTO.setQualifier(dto.getQualifier());
+ mvDTO.setValue(dto.getValue());
+ externalDataObject.addMetadata(mvDTO);
+ }
+ return externalDataObject;
+ }
+
+ private String getFirstValue(ImportRecord record, String metadata) {
+ String id = null;
+ String[] split = StringUtils.split(metadata, ".", 3);
+        Collection<MetadatumDTO> values = record.getValue(split[0], split[1], split.length == 3 ? split[2] : null);
+ if (!values.isEmpty()) {
+ id = (values.iterator().next().getValue());
+ }
+ return id;
+ }
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
index 46bc317d13..9db4402007 100644
--- a/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
+++ b/dspace-api/src/main/java/org/dspace/identifier/DOIIdentifierProvider.java
@@ -761,9 +761,9 @@ public class DOIIdentifierProvider
Item item = (Item) dso;
         List<MetadataValue> metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null);
+ String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator();
for (MetadataValue id : metadata) {
- if (id.getValue().startsWith(
- DOI.RESOLVER + String.valueOf(SLASH) + PREFIX + String.valueOf(SLASH) + NAMESPACE_SEPARATOR)) {
+ if (id.getValue().startsWith(leftPart)) {
return doiService.DOIFromExternalFormat(id.getValue());
}
}
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java
new file mode 100644
index 0000000000..272b149015
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/ArXivFieldMapping.java
@@ -0,0 +1,37 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.importer.external.arxiv.metadatamapping;
+
+import java.util.Map;
+import javax.annotation.Resource;
+
+import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
+
+/**
+ * An implementation of {@link AbstractMetadataFieldMapping}
+ * Responsible for defining the mapping of the ArXiv metadatum fields on the DSpace metadatum fields
+ *
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ */
+public class ArXivFieldMapping extends AbstractMetadataFieldMapping {
+
+ /**
+ * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
+ * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
+ * what metadatafield is generated.
+ *
+ * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
+ * the item.
+ */
+ @Override
+ @Resource(name = "arxivMetadataFieldMap")
+ public void setMetadataFieldMap(Map metadataFieldMap) {
+ super.setMetadataFieldMap(metadataFieldMap);
+ }
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java
new file mode 100644
index 0000000000..ed5ac5960b
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/metadatamapping/contributor/ArXivIdMetadataContributor.java
@@ -0,0 +1,60 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.importer.external.arxiv.metadatamapping.contributor;
+
+import java.util.Collection;
+
+import org.apache.axiom.om.OMElement;
+import org.dspace.importer.external.metadatamapping.MetadatumDTO;
+import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
+import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor;
+
+/**
+ * Arxiv specific implementation of {@link MetadataContributor}
+ * Responsible for generating the ArXiv Id from the retrieved item.
+ *
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
+ */
+public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor {
+
+ /**
+ * Retrieve the metadata associated with the given object.
+ * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO
+ * list
+ *
+ * @param t A class to retrieve metadata from.
+ * @return a collection of import records. Only the identifier of the found records may be put in the record.
+ */
+ @Override
+    public Collection<MetadatumDTO> contributeMetadata(OMElement t) {
+        Collection<MetadatumDTO> values = super.contributeMetadata(t);
+ parseValue(values);
+ return values;
+ }
+
+ /**
+ * ArXiv returns a full URL as in the value, e.g. http://arxiv.org/abs/1911.11405v1.
+ * This method parses out the identifier from the end of the URL, e.g. 1911.11405v1.
+ *
+ * @param dtos Metadata which contains the items uri
+ */
+    private void parseValue(Collection<MetadatumDTO> dtos) {
+ if (dtos != null) {
+ for (MetadatumDTO dto : dtos) {
+ if (dto != null && dto.getValue() != null && dto.getValue().contains("/")) {
+ int startIndex = dto.getValue().lastIndexOf('/') + 1;
+ int endIndex = dto.getValue().length();
+ String id = dto.getValue().substring(startIndex, endIndex);
+ dto.setValue(id);
+ }
+ }
+ }
+ }
+
+}
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java
new file mode 100644
index 0000000000..6b418423fa
--- /dev/null
+++ b/dspace-api/src/main/java/org/dspace/importer/external/arxiv/service/ArXivImportMetadataSourceServiceImpl.java
@@ -0,0 +1,421 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.importer.external.arxiv.service;
+
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.Callable;
+import javax.el.MethodNotFoundException;
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.axiom.om.OMElement;
+import org.apache.axiom.om.OMXMLBuilderFactory;
+import org.apache.axiom.om.OMXMLParserWrapper;
+import org.apache.axiom.om.xpath.AXIOMXPath;
+import org.apache.commons.lang3.StringUtils;
+import org.dspace.content.Item;
+import org.dspace.importer.external.datamodel.ImportRecord;
+import org.dspace.importer.external.datamodel.Query;
+import org.dspace.importer.external.exception.MetadataSourceException;
+import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
+import org.dspace.importer.external.service.components.QuerySource;
+import org.jaxen.JaxenException;
+
+/**
+ * Implements a data source for querying ArXiv
+ *
+ * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
+ *
+ */
+public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
+    implements QuerySource {
+
+ private WebTarget webTarget;
+ private String baseAddress;
+
+ /**
+ * Find the number of records matching the query string in ArXiv. Supports pagination.
+ *
+ * @param query a query string to base the search on.
+ * @param start offset to start at
+ * @param count number of records to retrieve.
+ * @return a set of records. Fully transformed.
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
+ return retry(new SearchByQueryCallable(query, count, start));
+ }
+
+ /**
+ * Find records based on a object query and convert them to a list metadata mapped in ImportRecord.
+ * The entry with the key "query" of the Query's map will be used as query string value.
+ *
+ * @see org.dspace.importer.external.datamodel.Query
+ * @see org.dspace.importer.external.datamodel.ImportRecord
+ * @param query a query object to base the search on.
+ * @return a set of records. Fully transformed.
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
+ return retry(new SearchByQueryCallable(query));
+ }
+
+ /**
+ * Find the number of records matching the query string in ArXiv;
+ *
+ * @param query a query object to base the search on.
+ * @return the sum of the matching records over this import source
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+ public int getRecordsCount(String query) throws MetadataSourceException {
+ return retry(new CountByQueryCallable(query));
+ }
+
+
+ /**
+ * Find the number of records matching a query;
+ * The entry with the key "query" of the Query's map will be used to get the query string.
+ *
+ * @see org.dspace.importer.external.datamodel.Query
+ * @param query a query string to base the search on.
+ * @return the sum of the matching records over this import source
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+ public int getRecordsCount(Query query) throws MetadataSourceException {
+ return retry(new CountByQueryCallable(query));
+ }
+
+ /**
+ * Get a single record of metadata from the arxiv by ArXiv ID.
+ *
+ * @param id id of the record in ArXiv
+ * @return the first matching record
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+
+ @Override
+ public ImportRecord getRecord(String id) throws MetadataSourceException {
+        List<ImportRecord> records = retry(new SearchByIdCallable(id));
+ return records == null || records.isEmpty() ? null : records.get(0);
+ }
+
+ /**
+ * Get a single record from the ArXiv matching the query.
+ * Field "query" will be used to get data from.
+ *
+ * @see org.dspace.importer.external.datamodel.Query
+ * @param query a query matching a single record
+ * @return the first matching record
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+ public ImportRecord getRecord(Query query) throws MetadataSourceException {
+        List<ImportRecord> records = retry(new SearchByIdCallable(query));
+ return records == null || records.isEmpty() ? null : records.get(0);
+ }
+
+ /**
+ * Initialize the class
+ *
+ * @throws Exception on generic exception
+ */
+ @Override
+ public void init() throws Exception {
+ Client client = ClientBuilder.newClient();
+ webTarget = client.target(baseAddress);
+ }
+
+ /**
+ * The string that identifies this import implementation. Preferable a URI
+ *
+ * @return the identifying uri
+ */
+ @Override
+ public String getImportSource() {
+ return "arxiv";
+ }
+
+ /**
+ * Expect this method will be not used and erased from the interface soon
+ */
+ @Override
+    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
+ // FIXME: we need this method?
+ throw new MethodNotFoundException("This method is not implemented for ArXiv");
+ }
+
+ /**
+ * Finds records based on query object.
+ * Supports search by title and/or author
+ *
+ * @param query a query object to base the search on.
+ * @return a collection of import records.
+ * @throws MetadataSourceException if the underlying methods throw any exception.
+ */
+ @Override
+    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
+ return retry(new FindMatchingRecordCallable(query));
+ }
+
+ /**
+ * This class is a Callable implementation to count the number of entries for an ArXiv
+ * query.
+ * This Callable use as query value to ArXiv the string queryString passed to constructor.
+ * If the object will be construct through Query.class instance, the value of the Query's
+ * map with the key "query" will be used.
+ *
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
+ */
+    private class CountByQueryCallable implements Callable<Integer> {
+ private Query query;
+
+
+ private CountByQueryCallable(String queryString) {
+ query = new Query();
+ query.addParameter("query", queryString);
+ }
+
+ private CountByQueryCallable(Query query) {
+ this.query = query;
+ }
+
+
+ @Override
+ public Integer call() throws Exception {
+ String queryString = query.getParameterAsClass("query", String.class);
+ Integer start = query.getParameterAsClass("start", Integer.class);
+ Integer maxResult = query.getParameterAsClass("count", Integer.class);
+ WebTarget local = webTarget.queryParam("search_query", queryString);
+ if (maxResult != null) {
+ local = local.queryParam("max_results", String.valueOf(maxResult));
+ }
+ if (start != null) {
+ local = local.queryParam("start", String.valueOf(start));
+ }
+ Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
+ Response response = invocationBuilder.get();
+ if (response.getStatus() == 200) {
+ String responseString = response.readEntity(String.class);
+ OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString));
+ OMElement element = records.getDocumentElement();
+ AXIOMXPath xpath = null;
+ try {
+ xpath = new AXIOMXPath("opensearch:totalResults");
+ xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/");
+ OMElement count = (OMElement) xpath.selectSingleNode(element);
+ return Integer.parseInt(count.getText());
+ } catch (JaxenException e) {
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
+ }
+
+ /**
+ * This class is a Callable implementation to get ArXiv entries based on
+ * query object.
+ * This Callable uses as the query value the queryString passed to the constructor.
+ * If the object is constructed from a Query instance, the Query's map entry with key "query" will be used.
+ * Pagination is also supported, using the values of the Query's map with keys "start" and "count".
+ *
+ * @see org.dspace.importer.external.datamodel.Query
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
+ */
+ private class SearchByQueryCallable implements Callable> {
+ private Query query;
+
+
+ private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
+ query = new Query();
+ query.addParameter("query", queryString);
+ query.addParameter("start", start);
+ query.addParameter("count", maxResult);
+ }
+
+ private SearchByQueryCallable(Query query) {
+ this.query = query;
+ }
+
+
+ @Override
+ public List call() throws Exception {
+ List results = new ArrayList();
+ String queryString = query.getParameterAsClass("query", String.class);
+ Integer start = query.getParameterAsClass("start", Integer.class);
+ Integer maxResult = query.getParameterAsClass("count", Integer.class);
+ WebTarget local = webTarget.queryParam("search_query", queryString);
+ if (maxResult != null) {
+ local = local.queryParam("max_results", String.valueOf(maxResult));
+ }
+ if (start != null) {
+ local = local.queryParam("start", String.valueOf(start));
+ }
+ Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
+ Response response = invocationBuilder.get();
+ if (response.getStatus() == 200) {
+ String responseString = response.readEntity(String.class);
+ List omElements = splitToRecords(responseString);
+ for (OMElement record : omElements) {
+ results.add(transformSourceRecords(record));
+ }
+ return results;
+ } else {
+ return null;
+ }
+ }
+ }
+
+ /**
+ * This class is a Callable implementation to get an ArXiv entry using ArXiv ID
+ * The ID to use can be passed through the constructor as a String or as Query's map entry, with the key "id".
+ *
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
+ */
+ private class SearchByIdCallable implements Callable> {
+ private Query query;
+
+ private SearchByIdCallable(Query query) {
+ this.query = query;
+ }
+
+ private SearchByIdCallable(String id) {
+ this.query = new Query();
+ query.addParameter("id", id);
+ }
+
+ @Override
+ public List call() throws Exception {
+ List results = new ArrayList();
+ String arxivid = query.getParameterAsClass("id", String.class);
+ if (StringUtils.isNotBlank(arxivid)) {
+ arxivid = arxivid.trim();
+ if (arxivid.startsWith("http://arxiv.org/abs/")) {
+ arxivid = arxivid.substring("http://arxiv.org/abs/".length());
+ } else if (arxivid.toLowerCase().startsWith("arxiv:")) {
+ arxivid = arxivid.substring("arxiv:".length());
+ }
+ }
+ WebTarget local = webTarget.queryParam("id_list", arxivid);
+ Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
+ Response response = invocationBuilder.get();
+ if (response.getStatus() == 200) {
+ String responseString = response.readEntity(String.class);
+ List omElements = splitToRecords(responseString);
+ for (OMElement record : omElements) {
+ results.add(transformSourceRecords(record));
+ }
+ return results;
+ } else {
+ return null;
+ }
+ }
+ }
+
+ /**
+ * This class is a Callable implementation to search ArXiv entries
+ * using author and title.
+ * There are two fields in the Query map to pass, with keys "title" and "author"
+ * (at least one must be used).
+ *
+ * @see org.dspace.importer.external.datamodel.Query
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
+ */
+ private class FindMatchingRecordCallable implements Callable> {
+
+ private Query query;
+
+ private FindMatchingRecordCallable(Query q) {
+ query = q;
+ }
+
+ @Override
+ public List call() throws Exception {
+ String queryString = getQuery(this.query);
+ List results = new ArrayList();
+ WebTarget local = webTarget.queryParam("search_query", queryString);
+ Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
+ Response response = invocationBuilder.get();
+ if (response.getStatus() == 200) {
+ String responseString = response.readEntity(String.class);
+ List omElements = splitToRecords(responseString);
+ for (OMElement record : omElements) {
+ results.add(transformSourceRecords(record));
+ }
+ return results;
+ } else {
+ return null;
+ }
+ }
+
+ private String getQuery(Query query) {
+ String title = query.getParameterAsClass("title", String.class);
+ String author = query.getParameterAsClass("author", String.class);
+ StringBuffer queryString = new StringBuffer();
+ if (StringUtils.isNotBlank(title)) {
+ queryString.append("ti:\"").append(title).append("\"");
+ }
+ if (StringUtils.isNotBlank(author)) {
+ // [FAU]
+ if (queryString.length() > 0) {
+ queryString.append(" AND ");
+ }
+ queryString.append("au:\"").append(author).append("\"");
+ }
+ return queryString.toString();
+ }
+ }
+
+ private List splitToRecords(String recordsSrc) {
+ OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
+ OMElement element = records.getDocumentElement();
+ AXIOMXPath xpath = null;
+ try {
+ xpath = new AXIOMXPath("ns:entry");
+ xpath.addNamespace("ns", "http://www.w3.org/2005/Atom");
+ List recordsList = xpath.selectNodes(element);
+ return recordsList;
+ } catch (JaxenException e) {
+ return null;
+ }
+ }
+
+ /**
+ * Return the baseAddress set to this object
+ *
+ * @return The String object that represents the baseAddress of this object
+ */
+ public String getBaseAddress() {
+ return baseAddress;
+ }
+
+ /**
+ * Set the baseAddress to this object
+ *
+ * @param baseAddress The String object that represents the baseAddress of this object
+ */
+ public void setBaseAddress(String baseAddress) {
+ this.baseAddress = baseAddress;
+ }
+}
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java
index 8c5e1b394a..8f392bdb52 100644
--- a/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java
+++ b/dspace-api/src/main/java/org/dspace/importer/external/datamodel/Query.java
@@ -71,7 +71,7 @@ public class Query {
return null;
} else {
Object o = c.iterator().next();
- if (clazz.isAssignableFrom(o.getClass())) {
+ if (o != null && clazz.isAssignableFrom(o.getClass())) {
return (T) o;
} else {
return null;
diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java
index ef2571acc6..c8d2467d5f 100644
--- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java
+++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java
@@ -21,6 +21,8 @@ import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;
/**
@@ -31,6 +33,8 @@ import org.springframework.beans.factory.annotation.Required;
public class SimpleXpathMetadatumContributor implements MetadataContributor {
private MetadataFieldConfig field;
+ private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class);
+
/**
* Return prefixToNamespaceMapping
*
@@ -157,12 +161,12 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor extends Ab
*
* @param generateQueryForItem the query generator to be used.
*/
- @Autowired
public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) {
this.generateQueryForItem = generateQueryForItem;
}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java
deleted file mode 100644
index ebc898e4cf..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivFileDataLoader.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-
-package org.dspace.submit.lookup;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.List;
-import java.util.Map;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-
-import gr.ekt.bte.core.DataLoadingSpec;
-import gr.ekt.bte.core.Record;
-import gr.ekt.bte.core.RecordSet;
-import gr.ekt.bte.core.Value;
-import gr.ekt.bte.dataloader.FileDataLoader;
-import gr.ekt.bte.exceptions.MalformedSourceException;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.Logger;
-import org.dspace.app.util.XMLUtils;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.xml.sax.SAXException;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class ArXivFileDataLoader extends FileDataLoader {
-
- private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ArXivFileDataLoader.class);
-
- Map fieldMap; // mapping between service fields and local
- // intermediate fields
-
- /**
- * Empty constructor
- */
- public ArXivFileDataLoader() {
- }
-
- /**
- * @param filename Name of file to load ArXiv data from.
- */
- public ArXivFileDataLoader(String filename) {
- super(filename);
- }
-
- /*
- * {@see gr.ekt.bte.core.DataLoader#getRecords()}
- *
- * @throws MalformedSourceException
- */
- @Override
- public RecordSet getRecords() throws MalformedSourceException {
-
- RecordSet recordSet = new RecordSet();
-
- try {
- InputStream inputStream = new FileInputStream(new File(filename));
-
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
-
- DocumentBuilder db = factory.newDocumentBuilder();
- Document inDoc = db.parse(inputStream);
-
- Element xmlRoot = inDoc.getDocumentElement();
- List dataRoots = XMLUtils.getElementList(xmlRoot, "entry");
-
- for (Element dataRoot : dataRoots) {
- Record record = ArxivUtils.convertArxixDomToRecord(dataRoot);
- if (record != null) {
- recordSet.addRecord(convertFields(record));
- }
- }
- } catch (FileNotFoundException e) {
- log.error(e.getMessage(), e);
- } catch (ParserConfigurationException e) {
- log.error(e.getMessage(), e);
- } catch (SAXException e) {
- log.error(e.getMessage(), e);
- } catch (IOException e) {
- log.error(e.getMessage(), e);
- }
-
- return recordSet;
- }
-
- /*
- * (non-Javadoc)
- *
- * @see
- * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
- */
- @Override
- public RecordSet getRecords(DataLoadingSpec spec)
- throws MalformedSourceException {
- if (spec.getOffset() > 0) {
- return new RecordSet();
- }
- return getRecords();
- }
-
- public Record convertFields(Record publication) {
- for (String fieldName : fieldMap.keySet()) {
- String md = null;
- if (fieldMap != null) {
- md = this.fieldMap.get(fieldName);
- }
-
- if (StringUtils.isBlank(md)) {
- continue;
- } else {
- md = md.trim();
- }
-
- if (publication.isMutable()) {
- List values = publication.getValues(fieldName);
- publication.makeMutable().removeField(fieldName);
- publication.makeMutable().addField(md, values);
- }
- }
-
- return publication;
- }
-
- public void setFieldMap(Map fieldMap) {
- this.fieldMap = fieldMap;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java
deleted file mode 100644
index e477412621..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivOnlineDataLoader.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.submit.lookup;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import gr.ekt.bte.core.Record;
-import org.apache.http.HttpException;
-import org.dspace.core.Context;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class ArXivOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
- protected ArXivService arXivService = new ArXivService();
-
- protected boolean searchProvider = true;
-
- public void setArXivService(ArXivService arXivService) {
- this.arXivService = arXivService;
- }
-
- @Override
- public List getSupportedIdentifiers() {
- return Arrays.asList(new String[] {ARXIV, DOI});
- }
-
- public void setSearchProvider(boolean searchProvider) {
- this.searchProvider = searchProvider;
- }
-
- @Override
- public boolean isSearchProvider() {
- return searchProvider;
- }
-
- @Override
- public List getByIdentifier(Context context,
- Map> keys) throws HttpException, IOException {
- List results = new ArrayList();
- if (keys != null) {
- Set dois = keys.get(DOI);
- Set arxivids = keys.get(ARXIV);
- List items = new ArrayList();
- if (dois != null && dois.size() > 0) {
- items.addAll(arXivService.getByDOIs(dois));
- }
- if (arxivids != null && arxivids.size() > 0) {
- for (String arxivid : arxivids) {
- items.add(arXivService.getByArXivIDs(arxivid));
- }
- }
-
- for (Record item : items) {
- results.add(convertFields(item));
- }
- }
- return results;
- }
-
- @Override
- public List search(Context context, String title, String author,
- int year) throws HttpException, IOException {
- List results = new ArrayList();
- List items = arXivService.searchByTerm(title, author, year);
- for (Record item : items) {
- results.add(convertFields(item));
- }
- return results;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java
deleted file mode 100644
index 337fb4175a..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArXivService.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.submit.lookup;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
-import gr.ekt.bte.core.Record;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.http.HttpException;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
-import org.apache.http.StatusLine;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.params.CoreConnectionPNames;
-import org.apache.http.params.HttpParams;
-import org.dspace.app.util.XMLUtils;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class ArXivService {
- private int timeout = 1000;
-
- /**
- * How long to wait for a connection to be established.
- *
- * @param timeout milliseconds
- */
- public void setTimeout(int timeout) {
- this.timeout = timeout;
- }
-
- public List getByDOIs(Set dois) throws HttpException,
- IOException {
- if (dois != null && dois.size() > 0) {
- String doisQuery = StringUtils.join(dois.iterator(), " OR ");
- return search(doisQuery, null, 100);
- }
- return null;
- }
-
- public List searchByTerm(String title, String author, int year)
- throws HttpException, IOException {
- StringBuffer query = new StringBuffer();
- if (StringUtils.isNotBlank(title)) {
- query.append("ti:\"").append(title).append("\"");
- }
- if (StringUtils.isNotBlank(author)) {
- // [FAU]
- if (query.length() > 0) {
- query.append(" AND ");
- }
- query.append("au:\"").append(author).append("\"");
- }
- return search(query.toString(), "", 10);
- }
-
- protected List search(String query, String arxivid, int max_result)
- throws IOException, HttpException {
- List results = new ArrayList();
- HttpGet method = null;
- try {
- HttpClient client = new DefaultHttpClient();
- HttpParams params = client.getParams();
- params.setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout);
-
- try {
- URIBuilder uriBuilder = new URIBuilder("http://export.arxiv.org/api/query");
- uriBuilder.addParameter("id_list", arxivid);
- uriBuilder.addParameter("search_query", query);
- uriBuilder.addParameter("max_results", String.valueOf(max_result));
- method = new HttpGet(uriBuilder.build());
- } catch (URISyntaxException ex) {
- throw new HttpException(ex.getMessage());
- }
-
- // Execute the method.
- HttpResponse response = client.execute(method);
- StatusLine responseStatus = response.getStatusLine();
- int statusCode = responseStatus.getStatusCode();
-
- if (statusCode != HttpStatus.SC_OK) {
- if (statusCode == HttpStatus.SC_BAD_REQUEST) {
- throw new RuntimeException("arXiv query is not valid");
- } else {
- throw new RuntimeException("Http call failed: "
- + responseStatus);
- }
- }
-
- try {
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
- // disallow DTD parsing to ensure no XXE attacks can occur.
- // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
- factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
-
- DocumentBuilder db = factory.newDocumentBuilder();
- Document inDoc = db.parse(response.getEntity().getContent());
-
- Element xmlRoot = inDoc.getDocumentElement();
- List dataRoots = XMLUtils.getElementList(xmlRoot,
- "entry");
-
- for (Element dataRoot : dataRoots) {
- Record crossitem = ArxivUtils
- .convertArxixDomToRecord(dataRoot);
- if (crossitem != null) {
- results.add(crossitem);
- }
- }
- } catch (Exception e) {
- throw new RuntimeException(
- "ArXiv identifier is not valid or not exist");
- }
- } finally {
- if (method != null) {
- method.releaseConnection();
- }
- }
-
- return results;
- }
-
- public Record getByArXivIDs(String raw) throws HttpException, IOException {
- if (StringUtils.isNotBlank(raw)) {
- raw = raw.trim();
- if (raw.startsWith("http://arxiv.org/abs/")) {
- raw = raw.substring("http://arxiv.org/abs/".length());
- } else if (raw.toLowerCase().startsWith("arxiv:")) {
- raw = raw.substring("arxiv:".length());
- }
- List result = search("", raw, 1);
- if (result != null && result.size() > 0) {
- return result.get(0);
- }
- }
- return null;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java b/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java
deleted file mode 100644
index 4caa0a957b..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/ArxivUtils.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-/**
- *
- */
-package org.dspace.submit.lookup;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import gr.ekt.bte.core.MutableRecord;
-import gr.ekt.bte.core.Record;
-import gr.ekt.bte.core.StringValue;
-import gr.ekt.bte.core.Value;
-import org.dspace.app.util.XMLUtils;
-import org.dspace.submit.util.SubmissionLookupPublication;
-import org.w3c.dom.Element;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class ArxivUtils {
-
- /**
- * Default constructor
- */
- private ArxivUtils() { }
-
- public static Record convertArxixDomToRecord(Element dataRoot) {
- MutableRecord record = new SubmissionLookupPublication("");
-
- String articleTitle = XMLUtils.getElementValue(dataRoot, "title");
- if (articleTitle != null) {
- record.addValue("title", new StringValue(articleTitle));
- }
- String summary = XMLUtils.getElementValue(dataRoot, "summary");
- if (summary != null) {
- record.addValue("summary", new StringValue(summary));
- }
- String year = XMLUtils.getElementValue(dataRoot, "published");
- if (year != null) {
- record.addValue("published", new StringValue(year));
- }
- String splashPageUrl = XMLUtils.getElementValue(dataRoot, "id");
- if (splashPageUrl != null) {
- record.addValue("id", new StringValue(splashPageUrl));
- }
- String comment = XMLUtils.getElementValue(dataRoot, "arxiv:comment");
- if (comment != null) {
- record.addValue("comment", new StringValue(comment));
- }
-
- List links = XMLUtils.getElementList(dataRoot, "link");
- if (links != null) {
- for (Element link : links) {
- if ("related".equals(link.getAttribute("rel"))
- && "pdf".equals(link.getAttribute("title"))) {
- String pdfUrl = link.getAttribute("href");
- if (pdfUrl != null) {
- record.addValue("pdfUrl", new StringValue(pdfUrl));
- }
- }
- }
- }
-
- String doi = XMLUtils.getElementValue(dataRoot, "arxiv:doi");
- if (doi != null) {
- record.addValue("doi", new StringValue(doi));
- }
- String journalRef = XMLUtils.getElementValue(dataRoot,
- "arxiv:journal_ref");
- if (journalRef != null) {
- record.addValue("journalRef", new StringValue(journalRef));
- }
-
- List primaryCategory = new LinkedList();
- List primaryCategoryList = XMLUtils.getElementList(dataRoot,
- "arxiv:primary_category");
- if (primaryCategoryList != null) {
- for (Element primaryCategoryElement : primaryCategoryList) {
- primaryCategory
- .add(primaryCategoryElement.getAttribute("term"));
- }
- }
-
- if (primaryCategory.size() > 0) {
- List values = new LinkedList();
- for (String s : primaryCategory) {
- values.add(new StringValue(s));
- }
- record.addField("primaryCategory", values);
- }
-
- List category = new LinkedList();
- List categoryList = XMLUtils.getElementList(dataRoot,
- "category");
- if (categoryList != null) {
- for (Element categoryElement : categoryList) {
- category.add(categoryElement.getAttribute("term"));
- }
- }
-
- if (category.size() > 0) {
- List values = new LinkedList();
- for (String s : category) {
- values.add(new StringValue(s));
- }
- record.addField("category", values);
- }
-
- List authors = new LinkedList();
- List authorsWithAffiliations = new LinkedList();
- List authorList = XMLUtils.getElementList(dataRoot, "author");
- if (authorList != null) {
- for (Element authorElement : authorList) {
- String authorName = XMLUtils.getElementValue(authorElement, "name");
- String authorAffiliation = XMLUtils.getElementValue(authorElement, "arxiv:affiliation");
-
- authors.add(authorName);
- authorsWithAffiliations.add(authorName + ": " + authorAffiliation);
- }
- }
-
- if (authors.size() > 0) {
- List values = new LinkedList();
- for (String sArray : authors) {
- values.add(new StringValue(sArray));
- }
- record.addField("author", values);
- }
-
- if (authorsWithAffiliations.size() > 0) {
- List values = new LinkedList();
- for (String sArray : authorsWithAffiliations) {
- values.add(new StringValue(sArray));
- }
- record.addField("authorWithAffiliation", values);
- }
-
- return record;
- }
-
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java
deleted file mode 100644
index 05a37e64d6..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedFileDataLoader.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-
-package org.dspace.submit.lookup;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.List;
-import java.util.Map;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-
-import gr.ekt.bte.core.DataLoadingSpec;
-import gr.ekt.bte.core.Record;
-import gr.ekt.bte.core.RecordSet;
-import gr.ekt.bte.core.Value;
-import gr.ekt.bte.dataloader.FileDataLoader;
-import gr.ekt.bte.exceptions.MalformedSourceException;
-import org.apache.commons.lang3.StringUtils;
-import org.dspace.app.util.XMLUtils;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.xml.sax.SAXException;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class PubmedFileDataLoader extends FileDataLoader {
-
- Map fieldMap; // mapping between service fields and local
- // intermediate fields
-
- /**
- *
- */
- public PubmedFileDataLoader() {
- }
-
- /**
- * @param filename Name of file to load CiNii data from.
- */
- public PubmedFileDataLoader(String filename) {
- super(filename);
- }
-
- /*
- * {@see gr.ekt.bte.core.DataLoader#getRecords()}
- *
- * @throws MalformedSourceException
- */
- @Override
- public RecordSet getRecords() throws MalformedSourceException {
-
- RecordSet recordSet = new RecordSet();
-
- try {
- InputStream inputStream = new FileInputStream(new File(filename));
-
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
-
- DocumentBuilder builder = factory.newDocumentBuilder();
- Document inDoc = builder.parse(inputStream);
-
- Element xmlRoot = inDoc.getDocumentElement();
- List pubArticles = XMLUtils.getElementList(xmlRoot,
- "PubmedArticle");
-
- for (Element xmlArticle : pubArticles) {
- Record record = null;
- try {
- record = PubmedUtils.convertPubmedDomToRecord(xmlArticle);
- recordSet.addRecord(convertFields(record));
- } catch (Exception e) {
- throw new RuntimeException(e.getMessage(), e);
- }
- }
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- } catch (ParserConfigurationException e) {
- e.printStackTrace();
- } catch (SAXException e) {
- e.printStackTrace();
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- return recordSet;
-
- }
-
- /*
- * (non-Javadoc)
- *
- * @see
- * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
- */
- @Override
- public RecordSet getRecords(DataLoadingSpec spec)
- throws MalformedSourceException {
- if (spec.getOffset() > 0) {
- return new RecordSet();
- }
- return getRecords();
- }
-
- public Record convertFields(Record publication) {
- for (String fieldName : fieldMap.keySet()) {
- String md = null;
- if (fieldMap != null) {
- md = this.fieldMap.get(fieldName);
- }
-
- if (StringUtils.isBlank(md)) {
- continue;
- } else {
- md = md.trim();
- }
-
- if (publication.isMutable()) {
- List values = publication.getValues(fieldName);
- publication.makeMutable().removeField(fieldName);
- publication.makeMutable().addField(md, values);
- }
- }
-
- return publication;
- }
-
- public void setFieldMap(Map fieldMap) {
- this.fieldMap = fieldMap;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java
deleted file mode 100644
index 094ce4e21d..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedOnlineDataLoader.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.submit.lookup;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import gr.ekt.bte.core.Record;
-import org.apache.http.HttpException;
-import org.apache.logging.log4j.Logger;
-import org.dspace.core.Context;
-import org.dspace.core.LogManager;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class PubmedOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
- protected boolean searchProvider = true;
-
- private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedOnlineDataLoader.class);
-
- protected PubmedService pubmedService = new PubmedService();
-
- public void setPubmedService(PubmedService pubmedService) {
- this.pubmedService = pubmedService;
- }
-
- @Override
- public List getSupportedIdentifiers() {
- return Arrays.asList(new String[] {PUBMED, DOI});
- }
-
- public void setSearchProvider(boolean searchProvider) {
- this.searchProvider = searchProvider;
- }
-
- @Override
- public boolean isSearchProvider() {
- return searchProvider;
- }
-
- @Override
- public List getByIdentifier(Context context,
- Map> keys) throws HttpException, IOException {
- Set pmids = keys != null ? keys.get(PUBMED) : null;
- Set dois = keys != null ? keys.get(DOI) : null;
- List results = new ArrayList();
- if (pmids != null && pmids.size() > 0
- && (dois == null || dois.size() == 0)) {
- for (String pmid : pmids) {
- Record p = null;
- try {
- p = pubmedService.getByPubmedID(pmid);
- } catch (Exception e) {
- log.error(LogManager.getHeader(context, "getByIdentifier",
- "pmid=" + pmid), e);
- }
- if (p != null) {
- results.add(convertFields(p));
- }
- }
- } else if (dois != null && dois.size() > 0
- && (pmids == null || pmids.size() == 0)) {
- StringBuffer query = new StringBuffer();
- for (String d : dois) {
- if (query.length() > 0) {
- query.append(" OR ");
- }
- query.append(d).append("[AI]");
- }
-
- List pubmedResults = pubmedService.search(query.toString());
- for (Record p : pubmedResults) {
- results.add(convertFields(p));
- }
- } else if (dois != null && dois.size() > 0 && pmids != null
- && pmids.size() > 0) {
- // EKT:ToDo: support list of dois and pmids in the search method of
- // pubmedService
- List pubmedResults = pubmedService.search(dois.iterator()
- .next(), pmids.iterator().next());
- if (pubmedResults != null) {
- for (Record p : pubmedResults) {
- results.add(convertFields(p));
- }
- }
- }
-
- return results;
- }
-
- @Override
- public List search(Context context, String title, String author,
- int year) throws HttpException, IOException {
- List pubmedResults = pubmedService.search(title, author, year);
- List results = new ArrayList();
- if (pubmedResults != null) {
- for (Record p : pubmedResults) {
- results.add(convertFields(p));
- }
- }
- return results;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java
deleted file mode 100644
index a5e74322f5..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedService.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-package org.dspace.submit.lookup;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-
-import gr.ekt.bte.core.Record;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.http.HttpException;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
-import org.apache.http.StatusLine;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.utils.URIBuilder;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.params.CoreConnectionPNames;
-import org.apache.logging.log4j.Logger;
-import org.dspace.app.util.XMLUtils;
-import org.dspace.core.ConfigurationManager;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.xml.sax.SAXException;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class PubmedService {
-
- private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedService.class);
-
- protected int timeout = 1000;
-
- public void setTimeout(int timeout) {
- this.timeout = timeout;
- }
-
- public Record getByPubmedID(String pubmedid) throws HttpException,
- IOException, ParserConfigurationException, SAXException {
- List ids = new ArrayList();
- ids.add(pubmedid.trim());
- List items = getByPubmedIDs(ids);
- if (items != null && items.size() > 0) {
- return items.get(0);
- }
- return null;
- }
-
- public List search(String title, String author, int year)
- throws HttpException, IOException {
- StringBuffer query = new StringBuffer();
- if (StringUtils.isNotBlank(title)) {
- query.append("((").append(title).append("[TI]) OR (");
- // [TI] does not always work, book chapter title
- query.append("(").append(title).append("[book]))");
- }
- if (StringUtils.isNotBlank(author)) {
- // [FAU]
- if (query.length() > 0) {
- query.append(" AND ");
- }
- query.append("(").append(author).append("[AU])");
- }
- if (year != -1) {
- // [DP]
- if (query.length() > 0) {
- query.append(" AND ");
- }
- query.append(year).append("[DP]");
- }
- return search(query.toString());
- }
-
- public List search(String query) throws IOException, HttpException {
- List results = new ArrayList<>();
- if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo")) {
- HttpGet method = null;
- try {
- HttpClient client = new DefaultHttpClient();
- client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout);
-
- URIBuilder uriBuilder = new URIBuilder(
- "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi");
- uriBuilder.addParameter("db", "pubmed");
- uriBuilder.addParameter("datetype", "edat");
- uriBuilder.addParameter("retmax", "10");
- uriBuilder.addParameter("term", query);
- method = new HttpGet(uriBuilder.build());
-
- // Execute the method.
- HttpResponse response = client.execute(method);
- StatusLine statusLine = response.getStatusLine();
- int statusCode = statusLine.getStatusCode();
-
- if (statusCode != HttpStatus.SC_OK) {
- throw new RuntimeException("WS call failed: "
- + statusLine);
- }
-
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
- // disallow DTD parsing to ensure no XXE attacks can occur.
- // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
- factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
-
- DocumentBuilder builder;
- try {
- builder = factory.newDocumentBuilder();
-
- Document inDoc = builder.parse(response.getEntity().getContent());
-
- Element xmlRoot = inDoc.getDocumentElement();
- Element idList = XMLUtils.getSingleElement(xmlRoot,
- "IdList");
- List pubmedIDs = XMLUtils.getElementValueList(
- idList, "Id");
- results = getByPubmedIDs(pubmedIDs);
- } catch (ParserConfigurationException e1) {
- log.error(e1.getMessage(), e1);
- } catch (SAXException e1) {
- log.error(e1.getMessage(), e1);
- }
- } catch (Exception e1) {
- log.error(e1.getMessage(), e1);
- } finally {
- if (method != null) {
- method.releaseConnection();
- }
- }
- } else {
- InputStream stream = null;
- try {
- File file = new File(
- ConfigurationManager.getProperty("dspace.dir")
- + "/config/crosswalks/demo/pubmed-search.xml");
- stream = new FileInputStream(file);
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
- // disallow DTD parsing to ensure no XXE attacks can occur.
- // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
- factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
-
- DocumentBuilder builder = factory.newDocumentBuilder();
- Document inDoc = builder.parse(stream);
-
- Element xmlRoot = inDoc.getDocumentElement();
- Element idList = XMLUtils.getSingleElement(xmlRoot, "IdList");
- List pubmedIDs = XMLUtils.getElementValueList(idList,
- "Id");
- results = getByPubmedIDs(pubmedIDs);
- } catch (Exception e) {
- throw new RuntimeException(e.getMessage(), e);
- } finally {
- if (stream != null) {
- try {
- stream.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- }
- }
- return results;
- }
-
- public List getByPubmedIDs(List pubmedIDs)
- throws HttpException, IOException, ParserConfigurationException,
- SAXException {
- List results = new ArrayList();
- HttpGet method = null;
- try {
- HttpClient client = new DefaultHttpClient();
- client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5 * timeout);
-
- try {
- URIBuilder uriBuilder = new URIBuilder(
- "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi");
- uriBuilder.addParameter("db", "pubmed");
- uriBuilder.addParameter("retmode", "xml");
- uriBuilder.addParameter("rettype", "full");
- uriBuilder.addParameter("id", StringUtils.join(
- pubmedIDs.iterator(), ","));
- method = new HttpGet(uriBuilder.build());
- } catch (URISyntaxException ex) {
- throw new RuntimeException("Request not sent", ex);
- }
-
- // Execute the method.
- HttpResponse response = client.execute(method);
- StatusLine statusLine = response.getStatusLine();
- int statusCode = statusLine.getStatusCode();
-
- if (statusCode != HttpStatus.SC_OK) {
- throw new RuntimeException("WS call failed: " + statusLine);
- }
-
- DocumentBuilderFactory factory = DocumentBuilderFactory
- .newInstance();
- factory.setValidating(false);
- factory.setIgnoringComments(true);
- factory.setIgnoringElementContentWhitespace(true);
- // disallow DTD parsing to ensure no XXE attacks can occur.
- // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
- factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
-
- DocumentBuilder builder = factory.newDocumentBuilder();
- Document inDoc = builder
- .parse(response.getEntity().getContent());
-
- Element xmlRoot = inDoc.getDocumentElement();
- List pubArticles = XMLUtils.getElementList(xmlRoot,
- "PubmedArticle");
-
- for (Element xmlArticle : pubArticles) {
- Record pubmedItem = null;
- try {
- pubmedItem = PubmedUtils
- .convertPubmedDomToRecord(xmlArticle);
- results.add(pubmedItem);
- } catch (Exception e) {
- throw new RuntimeException(
- "PubmedID is not valid or not exist: "
- + e.getMessage(), e);
- }
- }
-
- return results;
- } finally {
- if (method != null) {
- method.releaseConnection();
- }
- }
- }
-
- public List search(String doi, String pmid) throws HttpException,
- IOException {
- StringBuffer query = new StringBuffer();
- if (StringUtils.isNotBlank(doi)) {
- query.append(doi);
- query.append("[AID]");
- }
- if (StringUtils.isNotBlank(pmid)) {
- // [FAU]
- if (query.length() > 0) {
- query.append(" OR ");
- }
- query.append(pmid).append("[PMID]");
- }
- return search(query.toString());
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java b/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java
deleted file mode 100644
index bca34de295..0000000000
--- a/dspace-api/src/main/java/org/dspace/submit/lookup/PubmedUtils.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/**
- * The contents of this file are subject to the license and copyright
- * detailed in the LICENSE and NOTICE files at the root of the source
- * tree and available online at
- *
- * http://www.dspace.org/license/
- */
-/**
- *
- */
-package org.dspace.submit.lookup;
-
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import gr.ekt.bte.core.MutableRecord;
-import gr.ekt.bte.core.Record;
-import gr.ekt.bte.core.StringValue;
-import gr.ekt.bte.core.Value;
-import org.apache.commons.lang3.StringUtils;
-import org.dspace.app.util.XMLUtils;
-import org.dspace.submit.util.SubmissionLookupPublication;
-import org.w3c.dom.Element;
-
-/**
- * @author Andrea Bollini
- * @author Kostas Stamatis
- * @author Luigi Andrea Pascarelli
- * @author Panagiotis Koutsourakis
- */
-public class PubmedUtils {
-
- /**
- * Default constructor
- */
- private PubmedUtils() { }
-
- public static Record convertPubmedDomToRecord(Element pubArticle) {
- MutableRecord record = new SubmissionLookupPublication("");
-
- Map monthToNum = new HashMap();
- monthToNum.put("Jan", "01");
- monthToNum.put("Feb", "02");
- monthToNum.put("Mar", "03");
- monthToNum.put("Apr", "04");
- monthToNum.put("May", "05");
- monthToNum.put("Jun", "06");
- monthToNum.put("Jul", "07");
- monthToNum.put("Aug", "08");
- monthToNum.put("Sep", "09");
- monthToNum.put("Oct", "10");
- monthToNum.put("Nov", "11");
- monthToNum.put("Dec", "12");
-
- Element medline = XMLUtils.getSingleElement(pubArticle,
- "MedlineCitation");
-
- Element article = XMLUtils.getSingleElement(medline, "Article");
- Element pubmed = XMLUtils.getSingleElement(pubArticle, "PubmedData");
-
- Element identifierList = XMLUtils.getSingleElement(pubmed,
- "ArticleIdList");
- if (identifierList != null) {
- List identifiers = XMLUtils.getElementList(identifierList,
- "ArticleId");
- if (identifiers != null) {
- for (Element id : identifiers) {
- if ("pubmed".equals(id.getAttribute("IdType"))) {
- String pubmedID = id.getTextContent().trim();
- if (pubmedID != null) {
- record.addValue("pubmedID", new StringValue(
- pubmedID));
- }
- } else if ("doi".equals(id.getAttribute("IdType"))) {
- String doi = id.getTextContent().trim();
- if (doi != null) {
- record.addValue("doi", new StringValue(doi));
- }
- }
- }
- }
- }
-
- String status = XMLUtils.getElementValue(pubmed, "PublicationStatus");
- if (status != null) {
- record.addValue("publicationStatus", new StringValue(status));
- }
-
- String pubblicationModel = XMLUtils.getElementAttribute(medline,
- "Article", "PubModel");
- if (pubblicationModel != null) {
- record.addValue("pubModel", new StringValue(
- pubblicationModel));
- }
-
- String title = XMLUtils.getElementValue(article, "ArticleTitle");
- if (title != null) {
- record.addValue("articleTitle", new StringValue(title));
- }
-
- Element abstractElement = XMLUtils
- .getSingleElement(article, "Abstract");
- if (abstractElement == null) {
- abstractElement = XMLUtils.getSingleElement(medline,
- "OtherAbstract");
- }
- if (abstractElement != null) {
- String summary = XMLUtils.getElementValue(abstractElement,
- "AbstractText");
- if (summary != null) {
- record.addValue("abstractText", new StringValue(summary));
- }
- }
-
- List authors = new LinkedList();
- Element authorList = XMLUtils.getSingleElement(article, "AuthorList");
- if (authorList != null) {
- List authorsElement = XMLUtils.getElementList(authorList,
- "Author");
- if (authorsElement != null) {
- for (Element author : authorsElement) {
- if (StringUtils.isBlank(XMLUtils.getElementValue(author,
- "CollectiveName"))) {
- authors.add(new String[] {
- XMLUtils.getElementValue(author, "ForeName"),
- XMLUtils.getElementValue(author, "LastName")});
- }
- }
- }
- }
- if (authors.size() > 0) {
- List values = new LinkedList();
- for (String[] sArray : authors) {
- values.add(new StringValue(sArray[1] + ", " + sArray[0]));
- }
- record.addField("author", values);
- }
-
- Element journal = XMLUtils.getSingleElement(article, "Journal");
- if (journal != null) {
- List jnumbers = XMLUtils.getElementList(journal, "ISSN");
- if (jnumbers != null) {
- for (Element jnumber : jnumbers) {
- if ("Print".equals(jnumber.getAttribute("IssnType"))) {
- String issn = jnumber.getTextContent().trim();
- if (issn != null) {
- record.addValue("printISSN", new StringValue(issn));
- }
- } else {
- String eissn = jnumber.getTextContent().trim();
- if (eissn != null) {
- record.addValue("electronicISSN", new StringValue(eissn));
- }
- }
- }
- }
-
- String journalTitle = XMLUtils.getElementValue(journal, "Title");
- if (journalTitle != null) {
- record.addValue("journalTitle", new StringValue(journalTitle));
- }
-
- Element journalIssueElement = XMLUtils.getSingleElement(journal,
- "JournalIssue");
- if (journalIssueElement != null) {
- String volume = XMLUtils.getElementValue(journalIssueElement,
- "Volume");
- if (volume != null) {
- record.addValue("journalVolume", new StringValue(volume));
- }
-
- String issue = XMLUtils.getElementValue(journalIssueElement,
- "Issue");
- if (issue != null) {
- record.addValue("journalIssue", new StringValue(issue));
- }
-
- Element pubDateElement = XMLUtils.getSingleElement(
- journalIssueElement, "PubDate");
-
- String pubDate = null;
- if (pubDateElement != null) {
- pubDate = XMLUtils.getElementValue(pubDateElement, "Year");
-
- String mounth = XMLUtils.getElementValue(pubDateElement,
- "Month");
- String day = XMLUtils
- .getElementValue(pubDateElement, "Day");
- if (StringUtils.isNotBlank(mounth)
- && monthToNum.containsKey(mounth)) {
- pubDate += "-" + monthToNum.get(mounth);
- if (StringUtils.isNotBlank(day)) {
- pubDate += "-" + (day.length() == 1 ? "0" + day : day);
- }
- }
- }
- if (pubDate == null) {
- pubDate = XMLUtils.getElementValue(pubDateElement, "MedlineDate");
- }
- if (pubDate != null) {
- record.addValue("pubDate", new StringValue(pubDate));
- }
- }
-
- String language = XMLUtils.getElementValue(article, "Language");
- if (language != null) {
- record.addValue("language", new StringValue(language));
- }
-
- List type = new LinkedList();
- Element publicationTypeList = XMLUtils.getSingleElement(article,
- "PublicationTypeList");
- if (publicationTypeList != null) {
- List publicationTypes = XMLUtils.getElementList(
- publicationTypeList, "PublicationType");
- for (Element publicationType : publicationTypes) {
- type.add(publicationType.getTextContent().trim());
- }
- }
- if (type.size() > 0) {
- List values = new LinkedList();
- for (String s : type) {
- values.add(new StringValue(s));
- }
- record.addField("publicationType", values);
- }
-
- List primaryKeywords = new LinkedList();
- List secondaryKeywords = new LinkedList();
- Element keywordsList = XMLUtils.getSingleElement(medline,
- "KeywordList");
- if (keywordsList != null) {
- List keywords = XMLUtils.getElementList(keywordsList,
- "Keyword");
- for (Element keyword : keywords) {
- if ("Y".equals(keyword.getAttribute("MajorTopicYN"))) {
- primaryKeywords.add(keyword.getTextContent().trim());
- } else {
- secondaryKeywords.add(keyword.getTextContent().trim());
- }
- }
- }
- if (primaryKeywords.size() > 0) {
- List values = new LinkedList();
- for (String s : primaryKeywords) {
- values.add(new StringValue(s));
- }
- record.addField("primaryKeyword", values);
- }
- if (secondaryKeywords.size() > 0) {
- List values = new LinkedList();
- for (String s : secondaryKeywords) {
- values.add(new StringValue(s));
- }
- record.addField("secondaryKeyword", values);
- }
-
- List primaryMeshHeadings = new LinkedList();
- List secondaryMeshHeadings = new LinkedList();
- Element meshHeadingsList = XMLUtils.getSingleElement(medline,
- "MeshHeadingList");
- if (meshHeadingsList != null) {
- List meshHeadings = XMLUtils.getElementList(
- meshHeadingsList, "MeshHeading");
- for (Element meshHeading : meshHeadings) {
- if ("Y".equals(XMLUtils.getElementAttribute(meshHeading,
- "DescriptorName", "MajorTopicYN"))) {
- primaryMeshHeadings.add(XMLUtils.getElementValue(
- meshHeading, "DescriptorName"));
- } else {
- secondaryMeshHeadings.add(XMLUtils.getElementValue(
- meshHeading, "DescriptorName"));
- }
- }
- }
- if (primaryMeshHeadings.size() > 0) {
- List values = new LinkedList();
- for (String s : primaryMeshHeadings) {
- values.add(new StringValue(s));
- }
- record.addField("primaryMeshHeading", values);
- }
- if (secondaryMeshHeadings.size() > 0) {
- List values = new LinkedList();
- for (String s : secondaryMeshHeadings) {
- values.add(new StringValue(s));
- }
- record.addField("secondaryMeshHeading", values);
- }
-
- Element paginationElement = XMLUtils.getSingleElement(article,
- "Pagination");
- if (paginationElement != null) {
- String startPage = XMLUtils.getElementValue(paginationElement,
- "StartPage");
- String endPage = XMLUtils.getElementValue(paginationElement,
- "EndPage");
- if (StringUtils.isBlank(startPage)) {
- startPage = XMLUtils.getElementValue(paginationElement,
- "MedlinePgn");
- }
-
- if (startPage != null) {
- record.addValue("startPage", new StringValue(startPage));
- }
- if (endPage != null) {
- record.addValue("endPage", new StringValue(endPage));
- }
- }
- }
-
- return record;
- }
-}
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java
index ffc62dcddb..4150d84d04 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/XmlWorkflowFactoryImpl.java
@@ -97,7 +97,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory {
}
@Override
- public List getCollectionHandlesMappedToWorklow(Context context, String workflowName) {
+ public List getCollectionHandlesMappedToWorkflow(Context context, String workflowName) {
List collectionsMapped = new ArrayList<>();
for (String handle : this.workflowMapping.keySet()) {
if (this.workflowMapping.get(handle).getID().equals(workflowName)) {
@@ -107,7 +107,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory {
collectionsMapped.add(collection);
}
} catch (SQLException e) {
- log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorklow trying to " +
+ log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorkflow trying to " +
"retrieve collection with handle: " + handle, e);
}
}
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java
index 5d33843747..db856bb57b 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/factory/XmlWorkflowFactory.java
@@ -86,7 +86,7 @@ public interface XmlWorkflowFactory {
* @param workflowName Name of workflow we want the collections of that are mapped to is
* @return List of collections mapped to the requested workflow
*/
- public List getCollectionHandlesMappedToWorklow(Context context, String workflowName);
+ public List getCollectionHandlesMappedToWorkflow(Context context, String workflowName);
/**
* Returns list of collections that are not mapped to any configured workflow, and thus use the default workflow
diff --git a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java
index a982107d78..16befc2626 100644
--- a/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java
+++ b/dspace-api/src/main/java/org/dspace/xmlworkflow/state/Step.java
@@ -81,7 +81,7 @@ public class Step implements BeanNameAware {
/**
* Get the next step based on out the outcome
* @param outcome the outcome of the previous step
- * @return the next stepp or NULL if there is no step configured for this outcome
+ * @return the next step or NULL if there is no step configured for this outcome
*/
public Step getNextStep(int outcome) {
return outcomes.get(outcome);
diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml
index 53f49870a7..0046366f2e 100644
--- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml
+++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml
@@ -19,8 +19,14 @@
-
+
+
@@ -32,19 +38,22 @@
+
+
+
+
+
+
+
-
+
xml
diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
index 76cb57a40d..bad2c19131 100644
--- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
+++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/scripts.xml
@@ -14,12 +14,12 @@
-
+
-
+
-
+
diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java
index a6a4780aa9..d7379351e5 100644
--- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java
+++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataExportIT.java
@@ -13,6 +13,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
+import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
@@ -23,12 +24,22 @@ import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.dspace.scripts.factory.ScriptServiceFactory;
+import org.dspace.scripts.service.ScriptService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
public class MetadataExportIT
extends AbstractIntegrationTestWithDatabase {
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
private final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -59,4 +70,34 @@ public class MetadataExportIT
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
}
+
+ @Test(expected = ParseException.class)
+ public void metadataExportWithoutFileParameter()
+ throws IllegalAccessException, InstantiationException, ParseException {
+ context.turnOffAuthorisationSystem();
+ Community community = CommunityBuilder.createCommunity(context)
+ .build();
+ Collection collection = CollectionBuilder.createCollection(context, community)
+ .build();
+ Item item = ItemBuilder.createItem(context, collection)
+ .withAuthor("Donald, Smith")
+ .build();
+ context.restoreAuthSystemState();
+
+ String[] args = new String[] {"metadata-export",
+ "-i", String.valueOf(item.getHandle())};
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
+ ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
+
+ DSpaceRunnable script = null;
+ if (scriptConfiguration != null) {
+ script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
+ }
+ if (script != null) {
+ script.initialize(args, testDSpaceRunnableHandler, null);
+ script.run();
+ }
+ }
}
diff --git a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java
index a4f6b67b06..4a0043586b 100644
--- a/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java
+++ b/dspace-api/src/test/java/org/dspace/app/bulkedit/MetadataImportTest.java
@@ -7,10 +7,12 @@
*/
package org.dspace.app.bulkedit;
+import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;
import java.io.File;
+import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.AbstractIntegrationTest;
import org.dspace.app.launcher.ScriptLauncher;
@@ -22,16 +24,25 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.dspace.scripts.factory.ScriptServiceFactory;
+import org.dspace.scripts.service.ScriptService;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
public class MetadataImportTest extends AbstractIntegrationTest {
private final ItemService itemService
- = ContentServiceFactory.getInstance().getItemService();
+ = ContentServiceFactory.getInstance().getItemService();
private final CollectionService collectionService
- = ContentServiceFactory.getInstance().getCollectionService();
+ = ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
- = ContentServiceFactory.getInstance().getCommunityService();
+ = ContentServiceFactory.getInstance().getCommunityService();
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
@Test
public void metadataImportTest() throws Exception {
@@ -50,6 +61,7 @@ public class MetadataImportTest extends AbstractIntegrationTest {
StringUtils.equals(
itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY).get(0).getValue(),
"Donald, SmithImported"));
+ assertEquals(importedItem.getSubmitter(), eperson);
context.turnOffAuthorisationSystem();
itemService.delete(context, itemService.find(context, importedItem.getID()));
@@ -57,4 +69,24 @@ public class MetadataImportTest extends AbstractIntegrationTest {
communityService.delete(context, communityService.find(context, community.getID()));
context.restoreAuthSystemState();
}
+
+ @Test(expected = ParseException.class)
+ public void metadataImportWithoutEPersonParameterTest()
+ throws IllegalAccessException, InstantiationException, ParseException {
+ String fileLocation = new File(testProps.get("test.importcsv").toString()).getAbsolutePath();
+ String[] args = new String[] {"metadata-import", "-f", fileLocation, "-s"};
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
+ ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
+
+ DSpaceRunnable script = null;
+ if (scriptConfiguration != null) {
+ script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
+ }
+ if (script != null) {
+ script.initialize(args, testDSpaceRunnableHandler, null);
+ script.run();
+ }
+ }
}
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java
similarity index 65%
rename from dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java
rename to dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java
index a8c4b42173..2dfe3a781f 100644
--- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/csv/CSVMetadataImportReferenceIT.java
+++ b/dspace-api/src/test/java/org/dspace/app/csv/CSVMetadataImportReferenceIT.java
@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
-package org.dspace.app.rest.csv;
+package org.dspace.app.csv;
import static junit.framework.TestCase.assertEquals;
@@ -19,13 +19,18 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;
+import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.bulkedit.MetadataImportException;
import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException;
-import org.dspace.app.rest.test.AbstractEntityIntegrationTest;
+import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.ItemBuilder;
+import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.content.Collection;
+import org.dspace.content.Community;
+import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
@@ -35,35 +40,57 @@ import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.RelationshipService;
+import org.dspace.scripts.DSpaceRunnable;
+import org.dspace.scripts.configuration.ScriptConfiguration;
+import org.dspace.scripts.factory.ScriptServiceFactory;
+import org.dspace.scripts.service.ScriptService;
import org.junit.Before;
import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
/**
* Created by: Andrew Wood
* Date: 26 Jul 2019
*/
-public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest {
+public class CSVMetadataImportReferenceIT extends AbstractIntegrationTestWithDatabase {
//Common collection to utilize for test
private Collection col1;
- @Autowired
- private RelationshipService relationshipService;
+ private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
+ private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
- @Autowired
- private ItemService itemService;
+
+ Community parentCommunity;
/**
* Setup testing enviorment
*/
@Before
- public void setup() {
+ public void setup() throws SQLException {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
+
col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build();
+
+
+ context.turnOffAuthorisationSystem();
+
+ EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build();
+ EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build();
+ EntityType project = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build();
+ EntityType orgUnit = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build();
+
+ RelationshipTypeBuilder
+ .createRelationshipTypeBuilder(context, publication, person, "isAuthorOfPublication",
+ "isPublicationOfAuthor", 0, null, 0,
+ null).withCopyToLeft(false).withCopyToRight(true).build();
+
+ RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publication, project, "isProjectOfPublication",
+ "isPublicationOfProject", 0, null, 0,
+ null).withCopyToRight(true).build();
+
context.restoreAuthSystemState();
}
@@ -102,8 +129,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testSingleMdRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
- "+,Person,," + col1.getHandle() + ",0",
- "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"};
+ "+,Person,," + col1.getHandle() + ",0",
+ "+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -119,7 +146,7 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
performImportScript(csvLines, false);
Item[] items = new Item[csvLines.length - 1];
for (int i = 0; i < items.length; i++) {
- items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString());
+ items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString());
}
return items;
}
@@ -132,8 +159,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testSingleRowNameRef() throws Exception {
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
- "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0",
- "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"};
+ "+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0",
+ "+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -145,9 +172,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testMultiMdRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
- "+,Person,," + col1.getHandle() + ",0",
- "+,Person,," + col1.getHandle() + ",1",
- "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"};
+ "+,Person,," + col1.getHandle() + ",0",
+ "+,Person,," + col1.getHandle() + ",1",
+ "+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"};
Item[] items = runImport(csv);
assertRelationship(items[2], items[0], 1, "left", 0);
assertRelationship(items[2], items[1], 1, "left", 1);
@@ -160,9 +187,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testMultiRowNameRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName",
- "+,Person,," + col1.getHandle() + ",0,val1",
- "+,Person,," + col1.getHandle() + ",1,val2",
- "+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"};
+ "+,Person,," + col1.getHandle() + ",0,val1",
+ "+,Person,," + col1.getHandle() + ",1,val2",
+ "+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"};
Item[] items = runImport(csv);
assertRelationship(items[2], items[0], 1, "left", 0);
assertRelationship(items[2], items[1], 1, "left", 1);
@@ -176,11 +203,16 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testSingleUUIDReference() throws Exception {
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .build();
+ .withTitle("Author1")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
+ .withRelationshipType("Person")
+ .build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other",
- "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"};
+ "+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"};
Item[] items = runImport(csv);
assertRelationship(items[0], person, 1, "left", 0);
}
@@ -193,12 +225,21 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testMultiUUIDReference() throws Exception {
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
+ .withTitle("Author1")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
Item person2 = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .build();
- context.restoreAuthSystemState();
+ .withTitle("Author2")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, John")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("John")
+ .withRelationshipType("Person")
+ .build();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other",
"+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," +
col1.getHandle() + ",anything,0"};
@@ -216,12 +257,16 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
- "+,Person2,Person,," + col1.getHandle() + ",idVal,0",
- "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"};
+ "+,Person2,Person,," + col1.getHandle() + ",idVal,0",
+ "+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"};
context.restoreAuthSystemState();
Item[] items = runImport(csv);
assertRelationship(items[1], person, 1, "left", 0);
@@ -238,16 +283,25 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
Item person2 = ItemBuilder.createItem(context, col1)
- .withTitle("Person2")
- .withRelationshipType("Person")
- .build();
+ .withTitle("Person2")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, John")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("John")
+ .withRelationshipType("Person")
+ .build();
+
context.restoreAuthSystemState();
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
- "+,Person3,Person,," + col1.getHandle() + ",idVal,0",
+ "+,Person3,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," +
col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
@@ -264,8 +318,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testRefWithSpecialChar() throws Exception {
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
- "+,Person:,Person,," + col1.getHandle() + ",idVal,0",
- "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"};
+ "+,Person:,Person,," + col1.getHandle() + ",idVal,0",
+ "+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -300,14 +354,25 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testNonUniqueMDRefInDb() throws Exception {
context.turnOffAuthorisationSystem();
- ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .withIdentifierOther("1")
- .build();
- ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .withIdentifierOther("1")
- .build();
+ Item person = ItemBuilder.createItem(context, col1)
+ .withTitle("Person")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
+ .withRelationshipType("Person")
+ .withIdentifierOther("1")
+ .build();
+ Item person2 = ItemBuilder.createItem(context, col1)
+ .withTitle("Person2")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, John")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("John")
+ .withRelationshipType("Person")
+ .withIdentifierOther("1")
+ .build();
+
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"};
@@ -320,10 +385,15 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testNonUniqueMDRefInBoth() throws Exception {
context.turnOffAuthorisationSystem();
- ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .withIdentifierOther("1")
- .build();
+ Item person = ItemBuilder.createItem(context, col1)
+ .withTitle("Person")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
+ .withRelationshipType("Person")
+ .withIdentifierOther("1")
+ .build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Person,," + col1.getHandle() + ",1",
@@ -382,8 +452,10 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testInvalidRelationshipArchivedOrigin() throws Exception {
context.turnOffAuthorisationSystem();
Item testItem = ItemBuilder.createItem(context, col1)
- .withRelationshipType("OrgUnit")
- .build();
+ .withTitle("OrgUnit")
+ .withIssueDate("2017-10-17")
+ .withRelationshipType("OrgUnit")
+ .build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName",
"+,Person,," + col1.getHandle() + ",1" +
@@ -398,6 +470,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testInvalidRelationshipArchivedTarget() throws Exception {
context.turnOffAuthorisationSystem();
Item testItem = ItemBuilder.createItem(context, col1)
+ .withTitle("OrgUnit")
+ .withIssueDate("2017-10-17")
.withRelationshipType("OrgUnit")
.build();
context.restoreAuthSystemState();
@@ -413,26 +487,42 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testValidRelationshipNoDefinedTypesInCSV() throws Exception {
context.turnOffAuthorisationSystem();
- Item testItemOne = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Person")
- .withIdentifierOther("testItemOne")
- .build();
- Item testItemTwo = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Publication")
- .withIdentifierOther("testItemTwo")
- .build();
- Item testItemThree = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Project")
- .withIdentifierOther("testItemThree")
- .build();
+
+ Item testItem = ItemBuilder.createItem(context, col1)
+ .withTitle("Person")
+ .withIssueDate("2017-10-17")
+ .withAuthor("Smith, Donald")
+ .withPersonIdentifierLastName("Smith")
+ .withPersonIdentifierFirstName("Donald")
+ .withRelationshipType("Person")
+ .withIdentifierOther("testItemOne")
+ .build();
+
+
+ Item testItem2 = ItemBuilder.createItem(context, col1)
+ .withTitle("Publication")
+ .withIssueDate("2017-10-17")
+ .withRelationshipType("Publication")
+ .withIdentifierOther("testItemTwo")
+ .build();
+
+
+ Item testItem3 = ItemBuilder.createItem(context, col1)
+ .withTitle("Project")
+ .withIssueDate("2017-10-17")
+ .withRelationshipType("Project")
+ .withIdentifierOther("testItemThree")
+ .build();
+
+
context.restoreAuthSystemState();
String[] csv = {"id,relation.isAuthorOfPublication,relation.isPublicationOfProject,collection",
- testItemOne.getID().toString() + ",,," + col1.getHandle(),
- testItemTwo.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(),
- testItemThree.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()};
+ testItem.getID().toString() + ",,," + col1.getHandle(),
+ testItem2.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(),
+ testItem3.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()};
performImportScript(csv, false);
- assertRelationship(testItemTwo, testItemOne, 1, "left", 0);
- assertRelationship(testItemTwo, testItemThree, 1, "left", 0);
+ assertRelationship(testItem2, testItem, 1, "left", 0);
+ assertRelationship(testItem2, testItem3, 1, "left", 0);
}
/**
@@ -455,14 +545,17 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testInvalidTypeNameDefined() throws Exception {
context.turnOffAuthorisationSystem();
+
Item testItem = ItemBuilder.createItem(context, col1)
- .withRelationshipType("Publication")
- .build();
+ .withTitle("Publication")
+ .withIssueDate("2017-10-17")
+ .withRelationshipType("Publication")
+ .build();
context.restoreAuthSystemState();
String[] csv = {"id,collection,relationship.type,dc.title," +
"relation.isProjectOfPublication,relation.isPublicationOfProject",
"+," + col1.getHandle() + ",Project,Title," +
- testItem.getID().toString() + "," + testItem.getID().toString() };
+ testItem.getID().toString() + "," + testItem.getID().toString()};
performImportScript(csv, true);
}
@@ -477,17 +570,34 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
}
out.flush();
out.close();
+ String fileLocation = csvFile.getAbsolutePath();
try {
+ String[] args = null;
if (validateOnly) {
- return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com",
- "-s", "-v");
+ args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s", "-v"};
} else {
- return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com",
- "-s");
+ args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s",};
+ }
+ TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
+
+ ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
+ ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
+
+ DSpaceRunnable script = null;
+ if (scriptConfiguration != null) {
+ script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
+ }
+ if (script != null) {
+ script.initialize(args, testDSpaceRunnableHandler, null);
+ script.run();
+ }
+ if (testDSpaceRunnableHandler.getException() != null) {
+ throw testDSpaceRunnableHandler.getException();
}
} finally {
csvFile.delete();
}
+ return 0;
}
/**
diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java
index df92d0ea4b..69cfd0e136 100644
--- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java
+++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java
@@ -146,6 +146,32 @@ public abstract class AbstractDSpaceObjectBuilder
}
return (B) this;
}
+ /**
+ * Support method to grant the {@link Constants#ADMIN} permission over an object to a specific eperson,
+ * optionally effective only from the given start date
+ *
+ * @param dso
+ * the DSpaceObject on which to grant the permission
+ * @param eperson
+ * the eperson that will be granted the permission
+ * @return the builder properly configured to build the object with the additional admin permission
+ */
+ protected > B setAdminPermission(DSpaceObject dso, EPerson eperson,
+ Date startDate) {
+ try {
+
+ ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, null,
+ eperson, startDate, Constants.ADMIN,
+ "Integration Test", dso);
+ if (rp != null) {
+ resourcePolicyService.update(context, rp);
+ }
+ } catch (Exception e) {
+ return handleException(e);
+ }
+ return (B) this;
+
+ }
/**
* Support method to grant {@link Constants#REMOVE} permission to a specific eperson
diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java
index e4f3a0e29d..ca2699c283 100644
--- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java
+++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java
@@ -19,6 +19,7 @@ import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
@@ -126,6 +127,19 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder- {
return this;
}
+ /**
+ * Grant administrative (ADMIN) permission over this item to the given eperson
+ *
+ * @param ePerson the eperson to grant admin permission to
+ * @return this builder
+ * @throws SQLException
+ * @throws AuthorizeException
+ */
+ public ItemBuilder withAdminUser(EPerson ePerson) throws SQLException, AuthorizeException {
+ return setAdminPermission(item, ePerson, null);
+ }
+
+
@Override
public Item build() {
try {
diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java
index 0a0c4ad6bc..e20281bdfe 100644
--- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java
+++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java
@@ -166,6 +166,11 @@ public class WorkspaceItemBuilder extends AbstractBuilder
+ * {@code
+ * https:///sitemaps/26453b4d-e513-44e8-8d5b-395f62972eff/sitemap0.html
+ * }
+ *
+ *
+ * @author Maria Verdonck (Atmire) on 08/07/2020
+ */
+@Controller
+@RequestMapping("/${sitemap.path:sitemaps}")
+public class SitemapRestController {
+
+ private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SitemapRestController.class);
+
+ @Autowired
+ ConfigurationService configurationService;
+
+ // Most file systems are configured to use block sizes of 4096 or 8192 and our buffer should be a multiple of that.
+ private static final int BUFFER_SIZE = 4096 * 10;
+
+ /**
+ * Tries to retrieve a matching sitemap file in configured location
+ *
+ * @param name the name of the requested sitemap file
+ * @param response the HTTP response
+ * @param request the HTTP request
+ * @throws SQLException if db error while completing DSpace context
+ * @throws IOException if IO error surrounding sitemap file
+ */
+ @GetMapping("/{name}")
+ public void retrieve(@PathVariable String name, HttpServletResponse response,
+ HttpServletRequest request) throws IOException, SQLException {
+ // Find sitemap with given name in dspace/sitemaps
+ File foundSitemapFile = null;
+ File sitemapOutputDir = new File(configurationService.getProperty("sitemap.dir"));
+ if (sitemapOutputDir.exists() && sitemapOutputDir.isDirectory()) {
+ // List of all files and directories inside sitemapOutputDir
+ File sitemapFilesList[] = sitemapOutputDir.listFiles();
+ for (File sitemapFile : sitemapFilesList) {
+ if (name.equalsIgnoreCase(sitemapFile.getName())) {
+ if (sitemapFile.isFile()) {
+ foundSitemapFile = sitemapFile;
+ } else {
+ throw new ResourceNotFoundException(
+ "Directory with name " + name + " in " + sitemapOutputDir.getAbsolutePath() +
+ " found, but no file.");
+ }
+ }
+ }
+ } else {
+ throw new ResourceNotFoundException(
+ "Sitemap directory in " + sitemapOutputDir.getAbsolutePath() + " does not " +
+ "exist, either sitemaps have not been generated (./dspace generate-sitemaps)," +
+ " or are located elsewhere (config used: sitemap.dir).");
+ }
+ if (foundSitemapFile == null) {
+ throw new ResourceNotFoundException(
+ "Could not find sitemap file with name " + name + " in " + sitemapOutputDir.getAbsolutePath());
+ } else {
+ // return found sitemap file
+ this.returnSitemapFile(foundSitemapFile, response, request);
+ }
+ }
+
+ /**
+ * Sends back the matching sitemap file as a MultipartFile, with the headers set with details of the file
+ * (content, size, name, last modified)
+ *
+ * @param foundSitemapFile the found sitemap file, with matching name as in request path
+ * @param response the HTTP response
+ * @param request the HTTP request
+ * @throws SQLException if db error while completing DSpace context
+ * @throws IOException if IO error surrounding sitemap file
+ */
+ private void returnSitemapFile(File foundSitemapFile, HttpServletResponse response, HttpServletRequest request)
+ throws SQLException, IOException {
+ // Pipe the bits
+ try (InputStream is = new FileInputStream(foundSitemapFile)) {
+ MultipartFileSender sender = MultipartFileSender
+ .fromInputStream(is)
+ .withBufferSize(BUFFER_SIZE)
+ .withFileName(foundSitemapFile.getName())
+ .withLength(foundSitemapFile.length())
+ .withMimetype(Files.probeContentType(foundSitemapFile.toPath()))
+ .with(request)
+ .with(response);
+
+ sender.withLastModified(foundSitemapFile.lastModified());
+
+ // Determine if we need to send the file as a download or if the browser can open it inline
+ long dispositionThreshold = configurationService.getLongProperty("webui.content_disposition_threshold");
+ if (dispositionThreshold >= 0 && foundSitemapFile.length() > dispositionThreshold) {
+ sender.withDisposition(MultipartFileSender.CONTENT_DISPOSITION_ATTACHMENT);
+ }
+
+ Context context = ContextUtil.obtainContext(request);
+
+ // We have all the data we need, close the connection to the database so that it doesn't stay open during
+ // download/streaming
+ context.complete();
+
+ // Send the data
+ if (sender.isValid()) {
+ sender.serveResource();
+ }
+
+ } catch (ClientAbortException e) {
+ log.debug("Client aborted the request before the download was completed. " +
+ "Client is probably switching to a Range request.", e);
+ }
+ }
+}
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java
index 7ae5f5ecc0..fd1192e0bb 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/WorkflowDefinitionCollectionsLinkRepository.java
@@ -12,13 +12,11 @@ import java.util.List;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
-import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.WorkflowDefinitionRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.repository.AbstractDSpaceRestRepository;
import org.dspace.app.rest.repository.LinkRestRepository;
-import org.dspace.app.rest.utils.Utils;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
@@ -43,12 +41,6 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR
@Autowired
protected XmlWorkflowFactory xmlWorkflowFactory;
- @Autowired
- protected ConverterService converter;
-
- @Autowired
- protected Utils utils;
-
/**
* GET endpoint that returns the list of collections that make an explicit use of the workflow-definition.
* If a collection doesn't specify the workflow-definition to be used, the default mapping applies,
@@ -69,10 +61,10 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR
if (xmlWorkflowFactory.isDefaultWorkflow(workflowName)) {
collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getAllNonMappedCollectionsHandles(context));
}
- collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorklow(context,
+ collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorkflow(context,
workflowName));
Pageable pageable = optionalPageable != null ? optionalPageable : PageRequest.of(0, 20);
- return converter.toRestPage(collectionsMappedToWorkflow, pageable,
+ return super.converter.toRestPage(collectionsMappedToWorkflow, pageable,
projection);
} else {
throw new ResourceNotFoundException("No workflow with name " + workflowName + " is configured");
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java
index 5ce4977b5a..6cfee12751 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/AdministratorOfFeature.java
@@ -14,11 +14,13 @@ import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.CommunityRest;
+import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.utils.Utils;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
+import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -53,6 +55,10 @@ public class AdministratorOfFeature implements AuthorizationFeature {
Collection collection = (Collection) utils.getDSpaceAPIObjectFromRest(context, object);
return authService.isAdmin(context, collection);
}
+ if (object instanceof ItemRest) {
+ Item item = (Item) utils.getDSpaceAPIObjectFromRest(context, object);
+ return authService.isAdmin(context, item);
+ }
}
return authService.isAdmin(context);
}
@@ -62,7 +68,8 @@ public class AdministratorOfFeature implements AuthorizationFeature {
return new String[]{
SiteRest.CATEGORY + "." + SiteRest.NAME,
CommunityRest.CATEGORY + "." + CommunityRest.NAME,
- CollectionRest.CATEGORY + "." + CollectionRest.NAME
+ CollectionRest.CATEGORY + "." + CollectionRest.NAME,
+ ItemRest.CATEGORY + "." + ItemRest.NAME
};
}
}
\ No newline at end of file
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java
index fc786bfc85..84ce1a0032 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ConverterService.java
@@ -34,6 +34,7 @@ import org.dspace.app.rest.security.DSpacePermissionEvaluator;
import org.dspace.app.rest.security.WebSecurityExpressionEvaluator;
import org.dspace.app.rest.utils.Utils;
import org.dspace.services.RequestService;
+import org.springframework.aop.support.AopUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
@@ -51,6 +52,8 @@ import org.springframework.stereotype.Service;
/**
* Converts domain objects from the DSpace service layer to rest objects, and from rest objects to resource
* objects, applying {@link Projection}s where applicable.
+ *
+ * @author Luca Giamminonni (luca.giamminonni at 4science dot it)
*/
@Service
public class ConverterService {
@@ -149,14 +152,30 @@ public class ConverterService {
DSpaceRestRepository repositoryToUse = utils
.getResourceRepositoryByCategoryAndModel(baseObjectRest.getCategory(), baseObjectRest.getType());
Annotation preAuthorize = null;
- for (Method m : repositoryToUse.getClass().getMethods()) {
+ int maxDepth = 0;
+ // DS-4530 exclude the AOP Proxy from determining the annotations
+ for (Method m : AopUtils.getTargetClass(repositoryToUse).getMethods()) {
if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) {
- preAuthorize = AnnotationUtils.findAnnotation(m, PreAuthorize.class);
+ int depth = howManySuperclass(m.getDeclaringClass());
+ if (depth > maxDepth) {
+ preAuthorize = AnnotationUtils.findAnnotation(m, PreAuthorize.class);
+ maxDepth = depth;
+ }
}
}
return preAuthorize;
}
+ private int howManySuperclass(Class> declaringClass) {
+ Class curr = declaringClass;
+ int count = 0;
+ while (curr != Object.class) {
+ curr = curr.getSuperclass();
+ count++;
+ }
+ return count;
+ }
+
private Annotation getDefaultFindOnePreAuthorize() {
for (Method m : DSpaceRestRepository.class.getMethods()) {
if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) {
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java
index 4cba9153d3..04aa626153 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java
@@ -40,6 +40,8 @@ import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExcep
* @author Tom Desair (tom dot desair at atmire dot com)
* @author Frederic Van Reet (frederic dot vanreet at atmire dot com)
* @author Andrea Bollini (andrea.bollini at 4science.it)
+ * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
+ *
*/
@ControllerAdvice
public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionHandler {
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataFieldRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataFieldRest.java
index 966b3afbbe..4524f82a68 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataFieldRest.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/MetadataFieldRest.java
@@ -18,6 +18,7 @@ import org.dspace.app.rest.RestResourceController;
*/
public class MetadataFieldRest extends BaseObjectRest {
public static final String NAME = "metadatafield";
+ public static final String NAME_PLURAL = "metadatafields";
public static final String CATEGORY = RestAddressableModel.CORE;
@JsonIgnore
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ExternalSourceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ExternalSourceRestRepository.java
index 49a128cd85..948e25e364 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ExternalSourceRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ExternalSourceRestRepository.java
@@ -89,10 +89,10 @@ public class ExternalSourceRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) {
List externalSources = externalDataService.getExternalDataProviders();
- return converter.toRestPage(externalSources, pageable, externalSources.size(),
- utils.obtainProjection());
+ return converter.toRestPage(externalSources, pageable, utils.obtainProjection());
}
public Class getDomainClass() {
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java
index b7764b81dc..b0a5f526f0 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java
@@ -9,9 +9,11 @@ package org.dspace.app.rest.repository;
import static java.lang.Integer.parseInt;
import static org.apache.commons.lang3.StringUtils.isBlank;
+import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_EQUALS;
import java.io.IOException;
import java.sql.SQLException;
+import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
@@ -19,6 +21,8 @@ import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
@@ -31,6 +35,13 @@ import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
+import org.dspace.discovery.DiscoverQuery;
+import org.dspace.discovery.DiscoverResult;
+import org.dspace.discovery.IndexableObject;
+import org.dspace.discovery.SearchService;
+import org.dspace.discovery.SearchServiceException;
+import org.dspace.discovery.indexobject.IndexableMetadataField;
+import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -45,6 +56,10 @@ import org.springframework.stereotype.Component;
*/
@Component(MetadataFieldRest.CATEGORY + "." + MetadataFieldRest.NAME)
public class MetadataFieldRestRepository extends DSpaceRestRepository {
+ /**
+ * log4j logger
+ */
+ private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataFieldRestRepository.class);
@Autowired
MetadataFieldService metadataFieldService;
@@ -52,6 +67,9 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository findBySchema(@Parameter(value = "schema", required = true) String schemaName,
- Pageable pageable) {
+ Pageable pageable) {
try {
Context context = obtainContext();
MetadataSchema schema = metadataSchemaService.find(context, schemaName);
@@ -93,6 +111,108 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository findByFieldName(@Parameter(value = "schema", required = false) String schemaName,
+ @Parameter(value = "element", required = false) String elementName,
+ @Parameter(value = "qualifier", required = false) String qualifierName,
+ @Parameter(value = "query", required = false) String query,
+ @Parameter(value = "exactName", required = false) String exactName,
+ Pageable pageable) throws SQLException {
+ Context context = obtainContext();
+
+ List matchingMetadataFields = new ArrayList<>();
+
+ if (StringUtils.isBlank(exactName)) {
+ // Find matches in Solr Search core
+ DiscoverQuery discoverQuery =
+ this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query);
+ try {
+ DiscoverResult searchResult = searchService.search(context, null, discoverQuery);
+ for (IndexableObject object : searchResult.getIndexableObjects()) {
+ if (object instanceof IndexableMetadataField) {
+ matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject());
+ }
+ }
+ } catch (SearchServiceException e) {
+ log.error("Error while searching with Discovery", e);
+ throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage());
+ }
+ } else {
+ if (StringUtils.isNotBlank(elementName) || StringUtils.isNotBlank(qualifierName) ||
+ StringUtils.isNotBlank(schemaName) || StringUtils.isNotBlank(query)) {
+ throw new UnprocessableEntityException("Use either exactName or a combination of element, qualifier " +
+ "and schema to search discovery for metadata fields");
+ }
+ // Find at most one match with exactName query param in DB
+ MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.');
+ if (exactMatchingMdField != null) {
+ matchingMetadataFields.add(exactMatchingMdField);
+ }
+ }
+
+ return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection());
+ }
+
+ /**
+ * Creates a discovery query containing the filter queries derived from the request params
+ *
+ * @param context Context request
+ * @param schemaName an exact match of the prefix of the metadata schema (e.g. "dc", "dcterms", "eperson")
+ * @param elementName an exact match of the field's element (e.g. "contributor", "title")
+ * @param qualifierName an exact match of the field's qualifier (e.g. "author", "alternative")
+ * @param query part of the fully qualified field, should start with the start of the schema, element or
+ * qualifier (e.g. "dc.ti", "contributor", "auth", "contributor.ot")
+ * @return Discover query containing the filter queries derived from the request params
+ * @throws SQLException if a database error occurs
+ */
+ private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName,
+ String qualifierName, String query) throws SQLException {
+ List filterQueries = new ArrayList<>();
+ if (StringUtils.isNotBlank(query)) {
+ if (query.split("\\.").length > 3) {
+ throw new IllegalArgumentException("Query param should not contain more than 2 dot (.) separators, " +
+ "forming schema.element.qualifier metadata field name");
+ }
+ filterQueries.add(searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS,
+ OPERATOR_EQUALS, query).getFilterQuery() + "*");
+ }
+ if (StringUtils.isNotBlank(schemaName)) {
+ filterQueries.add(
+ searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME, OPERATOR_EQUALS,
+ schemaName).getFilterQuery());
+ }
+ if (StringUtils.isNotBlank(elementName)) {
+ filterQueries.add(
+ searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME, OPERATOR_EQUALS,
+ elementName).getFilterQuery());
+ }
+ if (StringUtils.isNotBlank(qualifierName)) {
+ filterQueries.add(searchService
+ .toFilterQuery(context, MetadataFieldIndexFactoryImpl.QUALIFIER_FIELD_NAME, OPERATOR_EQUALS,
+ qualifierName).getFilterQuery());
+ }
+
+ DiscoverQuery discoverQuery = new DiscoverQuery();
+ discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()]));
+ return discoverQuery;
+ }
+
@Override
public Class getDomainClass() {
return MetadataFieldRest.class;
@@ -101,15 +221,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository mm = getDSpaceObjectService().getMetadata(source, metadata[0], metadata[1], metadata[2],
Item.ANY);
- getDSpaceObjectService().clearMetadata(context, source, metadata[0], metadata[1], metadata[2], Item.ANY);
if (index != -1) {
- int idx = 0;
- for (MetadataValue m : mm) {
- if (idx != index) {
- getDSpaceObjectService().addMetadata(context, source, metadata[0], metadata[1], metadata[2],
- m.getLanguage(), m.getValue(), m.getAuthority(),
- m.getConfidence());
- }
- idx++;
- }
+ getDSpaceObjectService().removeMetadataValues(context, source, Arrays.asList(mm.get(index)));
+ } else {
+ getDSpaceObjectService().clearMetadata(context, source, metadata[0], metadata[1], metadata[2], Item.ANY);
}
}
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/MultipartFileSender.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/MultipartFileSender.java
index 4ae836bccf..284d0b87ab 100644
--- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/MultipartFileSender.java
+++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/MultipartFileSender.java
@@ -156,9 +156,13 @@ public class MultipartFileSender {
// Initialize response.
response.reset();
response.setBufferSize(bufferSize);
- response.setHeader(CONTENT_TYPE, contentType);
+ if (contentType != null) {
+ response.setHeader(CONTENT_TYPE, contentType);
+ }
response.setHeader(ACCEPT_RANGES, BYTES);
- response.setHeader(ETAG, checksum);
+ if (checksum != null) {
+ response.setHeader(ETAG, checksum);
+ }
response.setDateHeader(LAST_MODIFIED, lastModified);
response.setDateHeader(EXPIRES, System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);
@@ -481,4 +485,4 @@ public class MultipartFileSender {
return Arrays.binarySearch(matchValues, toMatch) > -1 || Arrays.binarySearch(matchValues, "*") > -1;
}
-}
\ No newline at end of file
+}
diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml
new file mode 100644
index 0000000000..294e197b70
--- /dev/null
+++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/rest/scripts.xml
@@ -0,0 +1,25 @@
+
+