Mirror of https://github.com/DSpace/DSpace.git, synced 2025-10-07 01:54:22 +00:00
Merge branch 'main' into D4CRIS-960
25 .github/workflows/label_merge_conflicts.yml (vendored, new file)
@@ -0,0 +1,25 @@
# This workflow checks open PRs for merge conflicts and labels them when conflicts are found
name: Check for merge conflicts

# Run whenever the "main" branch is updated
# NOTE: This means merge conflicts are only checked for when a PR is merged to main.
on:
  push:
    branches:
      - main

jobs:
  triage:
    runs-on: ubuntu-latest
    steps:
      # See: https://github.com/mschilde/auto-label-merge-conflicts/
      - name: Auto-label PRs with merge conflicts
        uses: mschilde/auto-label-merge-conflicts@v2.0
        # Add "merge conflict" label if a merge conflict is detected. Remove it when resolved.
        # Note, the authentication token is created automatically
        # See: https://docs.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token
        with:
          CONFLICT_LABEL_NAME: 'merge conflict'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        # Ignore errors
        continue-on-error: true
@@ -325,6 +325,14 @@
            <artifactId>apache-jena-libs</artifactId>
            <type>pom</type>
        </dependency>
        <!-- Required to support PubMed API call in "PubmedImportMetadataSourceServiceImpl.GetRecord" -->
        <!-- Makes runtime operations in Jersey Dependency Injection -->
        <dependency>
            <groupId>org.glassfish.jersey.inject</groupId>
            <artifactId>jersey-hk2</artifactId>
            <version>${jersey.version}</version>
        </dependency>

        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
@@ -10,10 +10,14 @@ package org.dspace.app.bulkedit
import java.sql.SQLException;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

@@ -41,8 +45,7 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
    public void internalRun() throws Exception {

        if (help) {
            handler.logInfo("\nfull export: metadata-export -f filename");
            handler.logInfo("partial export: metadata-export -i handle -f filename");
            logHelpInfo();
            printHelp();
            return;
        }
@@ -61,6 +64,11 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
        context.complete();
    }

    protected void logHelpInfo() {
        handler.logInfo("\nfull export: metadata-export");
        handler.logInfo("partial export: metadata-export -i handle");
    }

    @Override
    public MetadataExportScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager().getServiceByName("metadata-export",
@@ -75,17 +83,32 @@ public class MetadataExport extends DSpaceRunnable<MetadataExportScriptConfigura
            return;
        }

        // Check a filename is given
        if (!commandLine.hasOption('f')) {
            throw new ParseException("Required parameter -f missing!");
        }
        filename = commandLine.getOptionValue('f');

        exportAllMetadata = commandLine.hasOption('a');

        if (!commandLine.hasOption('i')) {
            exportAllItems = true;
        }
        handle = commandLine.getOptionValue('i');
        filename = getFileNameForExportFile();

        exportAllMetadata = commandLine.hasOption('a');

    }

    protected String getFileNameForExportFile() throws ParseException {
        Context context = new Context();
        try {
            DSpaceObject dso = null;
            if (StringUtils.isNotBlank(handle)) {
                dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, handle);
            } else {
                dso = ContentServiceFactory.getInstance().getSiteService().findSite(context);
            }
            if (dso == null) {
                throw new ParseException("A handle got given that wasn't able to be parsed to a DSpaceObject");
            }
            return dso.getID().toString() + ".csv";
        } catch (SQLException e) {
            handler.handleException("Something went wrong trying to retrieve DSO for handle: " + handle, e);
        }
        return null;
    }
}
@@ -0,0 +1,33 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import org.apache.commons.cli.ParseException;

public class MetadataExportCli extends MetadataExport {

    @Override
    protected String getFileNameForExportFile() {
        return commandLine.getOptionValue('f');
    }

    @Override
    public void setup() throws ParseException {
        super.setup();
        // Check a filename is given
        if (!commandLine.hasOption('f')) {
            throw new ParseException("Required parameter -f missing!");
        }
    }

    @Override
    protected void logHelpInfo() {
        handler.logInfo("\nfull export: metadata-export -f filename");
        handler.logInfo("partial export: metadata-export -i handle -f filename");
    }
}
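Taken together, the two hunks above are a template-method split: the base class derives a default export filename from the resolved DSpaceObject's UUID, while the CLI subclass overrides the hook so -f wins. A minimal standalone sketch of that pattern (class and method names hypothetical, not DSpace API):

    public class TemplateMethodDemo {
        abstract static class ExportScript {
            // Hook: subclasses decide where the export goes (REST default vs. CLI -f).
            protected String fileName() {
                return "default-uuid.csv"; // stands in for dso.getID() + ".csv"
            }
            final void run() {
                System.out.println("writing " + fileName());
            }
        }

        static class CliExportScript extends ExportScript {
            private final String fileOption;
            CliExportScript(String fileOption) {
                this.fileOption = fileOption;
            }
            @Override
            protected String fileName() {
                return fileOption; // the -f value wins on the command line
            }
        }

        public static void main(String[] args) {
            new CliExportScript("my-export.csv").run(); // prints: writing my-export.csv
        }
    }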
@@ -0,0 +1,26 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.bulkedit;

import java.io.OutputStream;

import org.apache.commons.cli.Options;

public class MetadataExportCliScriptConfiguration extends MetadataExportScriptConfiguration<MetadataExportCli> {


    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("f", "file", true, "destination where you want file written");
        options.getOption("f").setType(OutputStream.class);
        options.getOption("f").setRequired(true);
        super.options = options;
        return options;
    }
}
@@ -7,7 +7,6 @@
 */
package org.dspace.app.bulkedit;

import java.io.OutputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Options;
@@ -56,9 +55,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends

        options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
        options.getOption("i").setType(String.class);
        options.addOption("f", "file", true, "destination where you want file written");
        options.getOption("f").setType(OutputStream.class);
        options.getOption("f").setRequired(true);
        options.addOption("a", "all", false,
                          "include all metadata fields that are not normally changed (e.g. provenance)");
        options.getOption("a").setType(boolean.class);
@@ -182,24 +182,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
        c.turnOffAuthorisationSystem();

        // Find the EPerson, assign to context
        try {
            if (commandLine.hasOption('e')) {
                EPerson eperson;
                String e = commandLine.getOptionValue('e');
                if (e.indexOf('@') != -1) {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(c, e);
                } else {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().find(c, UUID.fromString(e));
                }

                if (eperson == null) {
                    throw new ParseException("Error, eperson cannot be found: " + e);
                }
                c.setCurrentUser(eperson);
            }
        } catch (Exception e) {
            throw new ParseException("Unable to find DSpace user: " + e.getMessage());
        }
        assignCurrentUserInContext(c);

        if (authorityControlled == null) {
            setAuthorizedMetadataFields();
@@ -277,6 +260,18 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura

    }

    protected void assignCurrentUserInContext(Context context) throws ParseException {
        UUID uuid = getEpersonIdentifier();
        if (uuid != null) {
            try {
                EPerson ePerson = EPersonServiceFactory.getInstance().getEPersonService().find(context, uuid);
                context.setCurrentUser(ePerson);
            } catch (SQLException e) {
                log.error("Something went wrong trying to fetch the eperson for uuid: " + uuid, e);
            }
        }
    }

    /**
     * This method determines whether the changes should be applied or not. This defaults to true for the REST
     * script, as we don't want to interact with the caller. It will be overridden in the CLI script to ask for
@@ -312,9 +307,6 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
            throw new ParseException("Required parameter -f missing!");
        }
        filename = commandLine.getOptionValue('f');
        if (!commandLine.hasOption('e')) {
            throw new ParseException("Required parameter -e missing!");
        }

        // Option to apply template to new items
        if (commandLine.hasOption('t')) {
@@ -10,7 +10,12 @@ package org.dspace.app.bulkedit;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.UUID;

import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;

/**
@@ -30,4 +35,34 @@ public class MetadataImportCLI extends MetadataImport {
            return false;
        }
    }

    @Override
    protected void assignCurrentUserInContext(Context context) throws ParseException {
        try {
            if (commandLine.hasOption('e')) {
                EPerson eperson;
                String e = commandLine.getOptionValue('e');
                if (e.indexOf('@') != -1) {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, e);
                } else {
                    eperson = EPersonServiceFactory.getInstance().getEPersonService().find(context, UUID.fromString(e));
                }

                if (eperson == null) {
                    throw new ParseException("Error, eperson cannot be found: " + e);
                }
                context.setCurrentUser(eperson);
            }
        } catch (Exception e) {
            throw new ParseException("Unable to find DSpace user: " + e.getMessage());
        }
    }

    @Override
    public void setup() throws ParseException {
        super.setup();
        if (!commandLine.hasOption('e')) {
            throw new ParseException("Required parameter -e missing!");
        }
    }
}
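The -e lookup above dispatches on the presence of '@': values containing it are treated as email addresses, anything else as an EPerson UUID. A self-contained illustration of that dispatch (plain Java, no DSpace services involved):

    public class EPersonArgDemo {
        static String lookupKind(String e) {
            // Mirrors the check in assignCurrentUserInContext: '@' present => email lookup
            return e.indexOf('@') != -1 ? "email" : "uuid";
        }

        public static void main(String[] args) {
            System.out.println(lookupKind("admin@example.org"));                    // email
            System.out.println(lookupKind("c5631b81-a8a6-4aa9-8bb0-2f34c8c9f42e")); // uuid
        }
    }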
@@ -7,10 +7,21 @@
 */
package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
 * The {@link ScriptConfiguration} for the {@link org.dspace.app.bulkedit.MetadataImportCLI} CLI script
 */
public class MetadataImportCliScriptConfiguration extends MetadataImportScriptConfiguration<MetadataImportCLI> {

    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
        options.getOption("e").setType(String.class);
        options.getOption("e").setRequired(true);
        super.options = options;
        return options;
    }
}
@@ -57,9 +57,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
        options.addOption("f", "file", true, "source file");
        options.getOption("f").setType(InputStream.class);
        options.getOption("f").setRequired(true);
        options.addOption("e", "email", true, "email address or user id of user (required if adding new items)");
        options.getOption("e").setType(String.class);
        options.getOption("e").setRequired(true);
        options.addOption("s", "silent", false,
                          "silent operation - doesn't request confirmation of changes USE WITH CAUTION");
        options.getOption("s").setType(boolean.class);
@@ -27,6 +27,7 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
@@ -84,6 +85,9 @@ public class GenerateSitemaps {
        options
            .addOption("p", "ping", true,
                       "ping specified search engine URL");
        options
            .addOption("d", "delete", false,
                       "delete sitemaps dir and its contents");

        CommandLine line = null;

@@ -105,10 +109,9 @@ public class GenerateSitemaps {
        }

        /*
         * Sanity check -- if no sitemap generation or pinging to do, print
         * usage
         * Sanity check -- if no sitemap generation or pinging to do, or deletion, print usage
         */
        if (line.getArgs().length != 0 || line.hasOption('b')
        if (line.getArgs().length != 0 || line.hasOption('d') || line.hasOption('b')
                && line.hasOption('s') && !line.hasOption('g')
                && !line.hasOption('m') && !line.hasOption('y')
                && !line.hasOption('p')) {
@@ -123,6 +126,10 @@ public class GenerateSitemaps {
            generateSitemaps(!line.hasOption('b'), !line.hasOption('s'));
        }

        if (line.hasOption('d')) {
            deleteSitemaps();
        }

        if (line.hasOption('a')) {
            pingConfiguredSearchEngines();
        }
@@ -140,6 +147,29 @@ public class GenerateSitemaps {
        System.exit(0);
    }

    /**
     * Runs generate-sitemaps without any params for the scheduler (task-scheduler.xml).
     *
     * @throws SQLException if a database error occurs.
     * @throws IOException  if IO error occurs.
     */
    public static void generateSitemapsScheduled() throws IOException, SQLException {
        generateSitemaps(true, true);
    }

    /**
     * Delete the sitemaps directory and its contents if it exists
     * @throws IOException if IO error occurs
     */
    public static void deleteSitemaps() throws IOException {
        File outputDir = new File(configurationService.getProperty("sitemap.dir"));
        if (!outputDir.exists() && !outputDir.isDirectory()) {
            log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
        } else {
            FileUtils.deleteDirectory(outputDir);
        }
    }

    /**
     * Generate sitemap.org protocol and/or basic HTML sitemaps.
     *
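generateSitemapsScheduled is just a fixed-flag wrapper, so a scheduler entry and a manual full run are equivalent. A hedged sketch of invoking it from custom code (assumes the class sits in its usual package, org.dspace.app.sitemap, and requires a booted DSpace kernel, so this is not runnable standalone):

    import java.io.IOException;
    import java.sql.SQLException;
    import org.dspace.app.sitemap.GenerateSitemaps;

    public class SitemapJob {
        public void runNightly() throws IOException, SQLException {
            // Same effect as generateSitemaps(true, true): build both HTML and sitemaps.org maps.
            GenerateSitemaps.generateSitemapsScheduled();
        }
    }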
@@ -150,14 +180,9 @@ public class GenerateSitemaps {
     * @throws IOException  if IO error
     *                      if IO error occurs.
     */
    public static void generateSitemaps(boolean makeHTMLMap,
                                        boolean makeSitemapOrg) throws SQLException, IOException {
        String sitemapStem = configurationService.getProperty("dspace.ui.url")
            + "/sitemap";
        String htmlMapStem = configurationService.getProperty("dspace.ui.url")
            + "/htmlmap";
        String handleURLStem = configurationService.getProperty("dspace.ui.url")
            + "/handle/";
    public static void generateSitemaps(boolean makeHTMLMap, boolean makeSitemapOrg) throws SQLException, IOException {
        String uiURLStem = configurationService.getProperty("dspace.ui.url");
        String sitemapStem = uiURLStem + "/sitemap";

        File outputDir = new File(configurationService.getProperty("sitemap.dir"));
        if (!outputDir.exists() && !outputDir.mkdir()) {
@@ -168,13 +193,11 @@ public class GenerateSitemaps {
        AbstractGenerator sitemapsOrg = null;

        if (makeHTMLMap) {
            html = new HTMLSitemapGenerator(outputDir, htmlMapStem + "?map=",
                                            null);
            html = new HTMLSitemapGenerator(outputDir, sitemapStem, ".html");
        }

        if (makeSitemapOrg) {
            sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem
                                                       + "?map=", null);
            sitemapsOrg = new SitemapsOrgGenerator(outputDir, sitemapStem, ".xml");
        }

        Context c = new Context(Context.Mode.READ_ONLY);
@@ -182,7 +205,7 @@ public class GenerateSitemaps {
        List<Community> comms = communityService.findAll(c);

        for (Community comm : comms) {
            String url = handleURLStem + comm.getHandle();
            String url = uiURLStem + "/communities/" + comm.getID();

            if (makeHTMLMap) {
                html.addURL(url, null);
@@ -197,7 +220,7 @@ public class GenerateSitemaps {
        List<Collection> colls = collectionService.findAll(c);

        for (Collection coll : colls) {
            String url = handleURLStem + coll.getHandle();
            String url = uiURLStem + "/collections/" + coll.getID();

            if (makeHTMLMap) {
                html.addURL(url, null);
@@ -214,7 +237,7 @@ public class GenerateSitemaps {

        while (allItems.hasNext()) {
            Item i = allItems.next();
            String url = handleURLStem + i.getHandle();
            String url = uiURLStem + "/items/" + i.getID();
            Date lastMod = i.getLastModified();

            if (makeHTMLMap) {
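The three hunks above switch sitemap entries from handle-based URLs to the UI's UUID-based routes. A quick standalone illustration of the before/after shapes (all values hypothetical):

    public class SitemapUrlDemo {
        public static void main(String[] args) {
            String uiURLStem = "https://demo.dspace.org"; // dspace.ui.url (hypothetical)
            String handle = "123456789/42";
            String itemUuid = "e5a6f3b0-1111-2222-3333-444455556666";

            System.out.println(uiURLStem + "/handle/" + handle);  // before: handle route
            System.out.println(uiURLStem + "/items/" + itemUuid); // after: UI item route by UUID
        }
    }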
@@ -59,7 +59,7 @@ public class SitemapsOrgGenerator extends AbstractGenerator {

    @Override
    public String getFilename(int number) {
        return "sitemap" + number + ".xml.gz";
        return "sitemap" + number + ".xml";
    }

    @Override
@@ -100,12 +100,12 @@ public class SitemapsOrgGenerator extends AbstractGenerator {

    @Override
    public boolean useCompression() {
        return true;
        return false;
    }

    @Override
    public String getIndexFilename() {
        return "sitemap_index.xml.gz";
        return "sitemap_index.xml";
    }

    @Override
@@ -57,7 +57,6 @@ import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.service.HarvestedCollectionService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.XmlWorkflowFactoryImpl;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
import org.dspace.xmlworkflow.state.Workflow;
import org.dspace.xmlworkflow.storedcomponents.CollectionRole;
@@ -387,7 +386,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
            log.error(LogManager.getHeader(context, "setWorkflowGroup",
                                           "collection_id=" + collection.getID() + " " + e.getMessage()), e);
        }
        if (!StringUtils.equals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID())) {
        if (!StringUtils.equals(workflowFactory.getDefaultWorkflow().getID(), workflow.getID())) {
            throw new IllegalArgumentException(
                "setWorkflowGroup can be used only on collection with the default basic dspace workflow. "
                    + "Instead, the collection: "
@@ -207,8 +207,8 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
    }

    @Override
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            List<String> values) throws SQLException {
    public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                           String lang, List<String> values) throws SQLException {
        MetadataField metadataField = metadataFieldService.findByElement(context, schema, element, qualifier);
        if (metadataField == null) {
            throw new SQLException(
@@ -216,12 +216,12 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                    "exist!");
        }

        addMetadata(context, dso, metadataField, lang, values);
        return addMetadata(context, dso, metadataField, lang, values);
    }

    @Override
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            List<String> values, List<String> authorities, List<Integer> confidences)
    public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                           String lang, List<String> values, List<String> authorities, List<Integer> confidences)
        throws SQLException {
        // We will not verify that they are valid entries in the registry
        // until update() is called.
@@ -231,15 +231,16 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                "bad_dublin_core schema=" + schema + "." + element + "." + qualifier + ". Metadata field does not " +
                    "exist!");
        }
        addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
        return addMetadata(context, dso, metadataField, lang, values, authorities, confidences);
    }

    @Override
    public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
                            List<String> authorities, List<Integer> confidences)
    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
                                           List<String> values, List<String> authorities, List<Integer> confidences)
        throws SQLException {
        boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField);
        boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField);
        List<MetadataValue> newMetadata = new ArrayList<>(values.size());
        // We will not verify that they are valid entries in the registry
        // until update() is called.
        for (int i = 0; i < values.size(); i++) {
@@ -250,6 +251,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                }
            }
            MetadataValue metadataValue = metadataValueService.create(context, dso, metadataField);
            newMetadata.add(metadataValue);
            //Set place to list length of all metadatavalues for the given schema.element.qualifier combination.
            // Subtract one to adhere to the 0 as first element rule
            metadataValue.setPlace(
@@ -304,29 +306,31 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
//            metadataValueService.update(context, metadataValue);
            dso.addDetails(metadataField.toString());
        }
        return newMetadata;
    }

    @Override
    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
                            String authority, int confidence) throws SQLException {
        addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
                    Arrays.asList(confidence));
    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
                                     String value, String authority, int confidence) throws SQLException {
        return addMetadata(context, dso, metadataField, language, Arrays.asList(value), Arrays.asList(authority),
                           Arrays.asList(confidence)).get(0);
    }

    @Override
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            String value) throws SQLException {
        addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value));
    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                     String lang, String value) throws SQLException {
        return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value)).get(0);
    }

    @Override
    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
        throws SQLException {
        addMetadata(context, dso, metadataField, language, Arrays.asList(value));
        return addMetadata(context, dso, metadataField, language, Arrays.asList(value)).get(0);
    }

    @Override
    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values)
    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
                                           List<String> values)
        throws SQLException {
        if (metadataField != null) {
            String fieldKey = metadataAuthorityService
@@ -343,18 +347,19 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                    getAuthoritiesAndConfidences(fieldKey, null, values, authorities, confidences, i);
                }
            }
            addMetadata(context, dso, metadataField, language, values, authorities, confidences);
            return addMetadata(context, dso, metadataField, language, values, authorities, confidences);
        } else {
            addMetadata(context, dso, metadataField, language, values, null, null);
            return addMetadata(context, dso, metadataField, language, values, null, null);
        }
    }
        return new ArrayList<>(0);
    }

    @Override
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            String value, String authority, int confidence) throws SQLException {
        addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value), Arrays.asList(authority),
                    Arrays.asList(confidence));
    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                     String lang, String value, String authority, int confidence) throws SQLException {
        return addMetadata(context, dso, schema, element, qualifier, lang, Arrays.asList(value),
                           Arrays.asList(authority), Arrays.asList(confidence)).get(0);
    }

    @Override
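Because addMetadata now returns what it created, callers can adjust the new values without re-querying. A sketch of a caller against the post-change API, assuming an injected ItemService and an open Context (not runnable standalone, since it needs a live DSpace kernel):

    import java.sql.SQLException;
    import org.dspace.content.Item;
    import org.dspace.content.MetadataValue;
    import org.dspace.content.service.ItemService;
    import org.dspace.core.Context;

    public class AddMetadataCaller {
        private final ItemService itemService; // assumed to be wired/injected elsewhere

        AddMetadataCaller(ItemService itemService) {
            this.itemService = itemService;
        }

        void addTitle(Context context, Item item) throws SQLException {
            // The single-value overload now hands back the created MetadataValue...
            MetadataValue mv = itemService.addMetadata(context, item, "dc", "title", null, null, "Example title");
            // ...so its place can be adjusted directly instead of re-reading the item's metadata.
            mv.setPlace(0);
        }
    }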
@@ -660,33 +665,35 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
    @Override
    public void addAndShiftRightMetadata(Context context, T dso, String schema, String element, String qualifier,
                                         String lang, String value, String authority, int confidence, int index)
        throws SQLException {

        List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);

        clearMetadata(context, dso, schema, element, qualifier, Item.ANY);

        int idx = 0;
        int place = 0;
        boolean last = true;
        for (MetadataValue rr : list) {
            if (idx == index) {
                addMetadata(context, dso, schema, element, qualifier,
                            lang, value, authority, confidence);
                MetadataValue newMetadata = addMetadata(context, dso, schema, element, qualifier,
                                                        lang, value, authority, confidence);

                moveSingleMetadataValue(context, dso, place, newMetadata);
                place++;
                last = false;
            }
            addMetadata(context, dso, schema, element, qualifier,
                        rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
            moveSingleMetadataValue(context, dso, place, rr);
            place++;
            idx++;
        }
        if (last) {
            addMetadata(context, dso, schema, element, qualifier,
                        lang, value, authority, confidence);
        }
    }

    @Override
    public void moveMetadata(Context context, T dso, String schema, String element, String qualifier, int from, int to)
        throws SQLException, IllegalArgumentException {

        if (from == to) {
            throw new IllegalArgumentException("The \"from\" location MUST be different from \"to\" location");
@@ -701,8 +708,6 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                "\n Idx from:" + from + " Idx to: " + to);
        }

        clearMetadata(context, dso, schema, element, qualifier, Item.ANY);

        int idx = 0;
        MetadataValue moved = null;
        for (MetadataValue md : list) {
@@ -714,49 +719,46 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
        }

        idx = 0;
        int place = 0;
        boolean last = true;
        for (MetadataValue rr : list) {
            if (idx == to && to < from) {
                addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
                            moved.getAuthority(), moved.getConfidence());
                moveSingleMetadataValue(context, dso, place, moved);
                place++;
                last = false;
            }
            if (idx != from) {
                addMetadata(context, dso, schema, element, qualifier, rr.getLanguage(), rr.getValue(),
                            rr.getAuthority(), rr.getConfidence());
                moveSingleMetadataValue(context, dso, place, rr);
                place++;
            }
            if (idx == to && to > from) {
                addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
                            moved.getAuthority(), moved.getConfidence());
                moveSingleMetadataValue(context, dso, place, moved);
                place++;
                last = false;
            }
            idx++;
        }
        if (last) {
            addMetadata(context, dso, schema, element, qualifier, moved.getLanguage(), moved.getValue(),
                        moved.getAuthority(), moved.getConfidence());
            moveSingleMetadataValue(context, dso, place, moved);
        }
    }

    /**
     * Supports moving metadata by updating the place of the metadata value
     */
    protected void moveSingleMetadataValue(Context context, T dso, int place, MetadataValue rr) {
        //just move the metadata
        rr.setPlace(place);
    }

    @Override
    public void replaceMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                                String value, String authority, int confidence, int index) throws SQLException {

        List<MetadataValue> list = getMetadata(dso, schema, element, qualifier);

        clearMetadata(context, dso, schema, element, qualifier, Item.ANY);

        int idx = 0;
        for (MetadataValue rr : list) {
            if (idx == index) {
                addMetadata(context, dso, schema, element, qualifier,
                            lang, value, authority, confidence);
            } else {
                addMetadata(context, dso, schema, element, qualifier,
                            rr.getLanguage(), rr.getValue(), rr.getAuthority(), rr.getConfidence());
            }
            idx++;
        }
        removeMetadataValues(context, dso, Arrays.asList(list.get(index)));
        addAndShiftRightMetadata(context, dso, schema, element, qualifier, lang, value, authority, confidence, index);
    }

    @Override
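The rewritten moveMetadata keeps every value and only rewrites its place, instead of clearing and re-adding. Its ordering logic reduces to the idx/place walk below, shown here on plain integers (a standalone sketch, not DSpace code):

    import java.util.ArrayList;
    import java.util.List;

    public class PlaceWalkDemo {
        // Reorders by giving each kept element a new consecutive "place",
        // re-inserting the moved element before (to < from) or after (to > from) position `to`.
        static List<Integer> move(List<Integer> list, int from, int to) {
            Integer moved = list.get(from);
            List<Integer> placed = new ArrayList<>();
            int idx = 0;
            boolean last = true;
            for (Integer rr : list) {
                if (idx == to && to < from) { placed.add(moved); last = false; }
                if (idx != from) { placed.add(rr); }
                if (idx == to && to > from) { placed.add(moved); last = false; }
                idx++;
            }
            if (last) { placed.add(moved); } // moved past the end
            return placed;
        }

        public static void main(String[] args) {
            System.out.println(move(List.of(10, 20, 30, 40), 3, 1)); // [10, 40, 20, 30]
            System.out.println(move(List.of(10, 20, 30, 40), 0, 2)); // [20, 30, 10, 40]
        }
    }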
@@ -1372,6 +1372,32 @@ prevent the generation of resource policy entry values with null dspace_object a

    }

    /**
     * Supports moving metadata by adding the metadata value or updating the place of the relationship
     */
    @Override
    protected void moveSingleMetadataValue(Context context, Item dso, int place, MetadataValue rr) {
        if (rr instanceof RelationshipMetadataValue) {
            try {
                //Retrieve the applicable relationship
                Relationship rs = relationshipService.find(context,
                                                           ((RelationshipMetadataValue) rr).getRelationshipId());
                if (rs.getLeftItem() == dso) {
                    rs.setLeftPlace(place);
                } else {
                    rs.setRightPlace(place);
                }
                relationshipService.update(context, rs);
            } catch (Exception e) {
                //should not occur, otherwise metadata can't be updated either
                log.error("An error occurred while moving " + rr.getAuthority() + " for item " + dso.getID(), e);
            }
        } else {
            //just move the metadata
            rr.setPlace(place);
        }
    }

    /**
     * This method will sort the List of MetadataValue objects based on the MetadataSchema, MetadataField Element,
     * MetadataField Qualifier and MetadataField Place in that order.
@@ -9,6 +9,8 @@ package org.dspace.content;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections4.CollectionUtils;
@@ -20,8 +22,12 @@ import org.dspace.content.dao.MetadataFieldDAO;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.SiteService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.discovery.indexobject.IndexableMetadataField;
import org.dspace.event.Event;
import org.springframework.beans.factory.annotation.Autowired;

/**
@@ -46,6 +52,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
    protected MetadataValueService metadataValueService;
    @Autowired(required = true)
    protected MetadataSchemaService metadataSchemaService;
    @Autowired
    protected SiteService siteService;

    protected MetadataFieldServiceImpl() {
@@ -77,6 +85,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {

        log.info(LogManager.getHeader(context, "create_metadata_field",
                                      "metadata_field_id=" + metadataField.getID()));
        // Update the index of type metadatafield
        this.triggerEventToUpdateIndex(context, metadataField.getID());
        return metadataField;
    }
@@ -149,6 +159,8 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {
                                      "metadata_field_id=" + metadataField.getID() + "element=" + metadataField
                                          .getElement()
                                          + "qualifier=" + metadataField.getQualifier()));
        // Update the index of type metadatafield
        this.triggerEventToUpdateIndex(context, metadataField.getID());
    }

    @Override
@@ -177,6 +189,21 @@ public class MetadataFieldServiceImpl implements MetadataFieldService {

        log.info(LogManager.getHeader(context, "delete_metadata_field",
                                      "metadata_field_id=" + metadataField.getID()));
        // Update the index of type metadatafield
        this.triggerEventToUpdateIndex(context, metadataField.getID());
    }

    /**
     * Calls a MODIFY SITE event with the identifier of the changed mdField, so it can be indexed in
     * {@link org.dspace.discovery.IndexEventConsumer}, with type of {@link org.dspace.discovery.IndexableObject} in
     * {@link Event}.detail and the identifiers of the changed mdFields in {@link Event}.identifiers
     *
     * @param context   DSpace context
     * @param mdFieldId ID of the metadata field that needs to be (re)indexed
     */
    private void triggerEventToUpdateIndex(Context context, int mdFieldId) {
        context.addEvent(new Event(Event.MODIFY, Constants.SITE, null, IndexableMetadataField.TYPE, new ArrayList<>(
            Arrays.asList(Integer.toString(mdFieldId)))));
    }

    /**
@@ -7,6 +7,8 @@
 */
package org.dspace.content;

import org.dspace.core.Constants;

/**
 * This class is used as a representation of MetadataValues for the MetadataValues that are derived from the
 * Relationships that the item has. This includes the useForPlace property which we'll have to use to determine
@@ -57,4 +59,13 @@ public class RelationshipMetadataValue extends MetadataValue {
        }
        return super.equals(obj);
    }

    /**
     * Retrieves the Relationship ID from which the current RelationshipMetadataValue is derived
     *
     * @return the relationship ID
     */
    public int getRelationshipId() {
        return Integer.parseInt(getAuthority().substring(Constants.VIRTUAL_AUTHORITY_PREFIX.length()));
    }
}
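getRelationshipId relies on the authority of a virtual metadata value carrying the relationship ID behind a fixed prefix (Constants.VIRTUAL_AUTHORITY_PREFIX, which to my understanding is "virtual::"). A standalone illustration of that parsing:

    public class VirtualAuthorityDemo {
        private static final String VIRTUAL_AUTHORITY_PREFIX = "virtual::"; // assumed value

        public static void main(String[] args) {
            String authority = VIRTUAL_AUTHORITY_PREFIX + 42; // stored as e.g. "virtual::42"
            int relationshipId = Integer.parseInt(authority.substring(VIRTUAL_AUTHORITY_PREFIX.length()));
            System.out.println(relationshipId); // 42
        }
    }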
@@ -265,7 +265,12 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {

        // Need to delete the workspaceitem row first since it refers
        // to item ID
        workspaceItem.getSupervisorGroups().clear();
        try {
            workspaceItem.getSupervisorGroups().clear();
        } catch (Exception e) {
            log.error("failed to clear supervisor group", e);
        }

        workspaceItemDAO.delete(context, workspaceItem);

    }
@@ -200,10 +200,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
     *                and the ISO3166 country code. <code>null</code> means the
     *                value has no language (for example, a date).
     * @param values  the values to add.
     * @return the list of MetadataValues added to the object
     * @throws SQLException if database error
     */
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            List<String> values) throws SQLException;
    public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                           String lang, List<String> values) throws SQLException;

    /**
     * Add metadata fields. These are appended to existing values.
@@ -223,10 +224,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
     * @param values      the values to add.
     * @param authorities the external authority key for this value (or null)
     * @param confidences the authority confidence (default 0)
     * @return the list of MetadataValues added to the object
     * @throws SQLException if database error
     */
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            List<String> values, List<String> authorities, List<Integer> confidences)
    public List<MetadataValue> addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                           String lang, List<String> values, List<String> authorities, List<Integer> confidences)
        throws SQLException;

    /**
@@ -243,32 +245,64 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
     * @param values      the values to add.
     * @param authorities the external authority key for this value (or null)
     * @param confidences the authority confidence (default 0)
     * @return the list of MetadataValues added to the object
     * @throws SQLException if database error
     */
    public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
                            List<String> authorities, List<Integer> confidences) throws SQLException;
    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String lang,
                                           List<String> values, List<String> authorities, List<Integer> confidences) throws SQLException;

    /**
     * Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single
     * value needs to be added
     *
     * @param context
     * @param dso
     * @param metadataField
     * @param language
     * @param value
     * @param authority
     * @param confidence
     *
     * @param context       DSpace context
     * @param dso           DSpaceObject
     * @param metadataField the metadata field to which the value is to be set
     * @param language      the ISO639 language code, optionally followed by an underscore
     *                      and the ISO3166 country code. <code>null</code> means the
     *                      value has no language (for example, a date).
     * @param value         the value to add.
     * @param authority     the external authority key for this value (or null)
     * @param confidence    the authority confidence (default 0)
     * @return the MetadataValue added to the object
     * @throws SQLException
     */
    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
                            String authority, int confidence) throws SQLException;
    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language,
                                     String value, String authority, int confidence) throws SQLException;

    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
    /**
     * Add a metadata field. These are appended to existing values.
     * Use <code>clearMetadata</code> to remove values.
     *
     * @param context       DSpace context
     * @param dso           DSpaceObject
     * @param metadataField the metadata field to which the value is to be set
     * @param language      the ISO639 language code, optionally followed by an underscore
     *                      and the ISO3166 country code. <code>null</code> means the
     *                      value has no language (for example, a date).
     * @param value         the value to add.
     * @return the MetadataValue added to the object
     * @throws SQLException if database error
     */
    public MetadataValue addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
        throws SQLException;

    public void addMetadata(Context context, T dso, MetadataField metadataField, String language, List<String> values)
        throws SQLException;
    /**
     * Add metadata fields. These are appended to existing values.
     * Use <code>clearMetadata</code> to remove values.
     *
     * @param context       DSpace context
     * @param dso           DSpaceObject
     * @param metadataField the metadata field to which the value is to be set
     * @param language      the ISO639 language code, optionally followed by an underscore
     *                      and the ISO3166 country code. <code>null</code> means the
     *                      value has no language (for example, a date).
     * @param values        the values to add.
     * @return the list of MetadataValues added to the object
     * @throws SQLException if database error
     */
    public List<MetadataValue> addMetadata(Context context, T dso, MetadataField metadataField, String language,
                                           List<String> values) throws SQLException;

    /**
     * Add a single metadata field. This is appended to existing
@@ -285,10 +319,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
     *                  and the ISO3166 country code. <code>null</code> means the
     *                  value has no language (for example, a date).
     * @param value     the value to add.
     * @return the MetadataValue added to the object
     * @throws SQLException if database error
     */
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            String value) throws SQLException;
    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                     String lang, String value) throws SQLException;

    /**
     * Add a single metadata field. This is appended to existing
@@ -307,10 +342,11 @@ public interface DSpaceObjectService<T extends DSpaceObject> {
     * @param value      the value to add.
     * @param authority  the external authority key for this value (or null)
     * @param confidence the authority confidence (default 0)
     * @return the MetadataValue added to the object
     * @throws SQLException if database error
     */
    public void addMetadata(Context context, T dso, String schema, String element, String qualifier, String lang,
                            String value, String authority, int confidence) throws SQLException;
    public MetadataValue addMetadata(Context context, T dso, String schema, String element, String qualifier,
                                     String lang, String value, String authority, int confidence) throws SQLException;

    /**
     * Clear metadata values. As with <code>getDC</code> above,
371 dspace-api/src/main/java/org/dspace/curate/Curation.java (new file)
@@ -0,0 +1,371 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.curate;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.PrintStream;
|
||||
import java.io.Writer;
|
||||
import java.sql.SQLException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.content.DSpaceObject;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.core.factory.CoreServiceFactory;
|
||||
import org.dspace.curate.factory.CurateServiceFactory;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.factory.EPersonServiceFactory;
|
||||
import org.dspace.eperson.service.EPersonService;
|
||||
import org.dspace.handle.factory.HandleServiceFactory;
|
||||
import org.dspace.handle.service.HandleService;
|
||||
import org.dspace.scripts.DSpaceRunnable;
|
||||
import org.dspace.utils.DSpace;
|
||||
|
||||
/**
|
||||
* CurationCli provides command-line access to Curation tools and processes.
|
||||
*
|
||||
* @author richardrodgers
|
||||
*/
|
||||
public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
|
||||
|
||||
protected EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
|
||||
|
||||
protected Context context;
|
||||
private CurationClientOptions curationClientOptions;
|
||||
|
||||
private String task;
|
||||
private String taskFile;
|
||||
private String id;
|
||||
private String queue;
|
||||
private String scope;
|
||||
private String reporter;
|
||||
private Map<String, String> parameters;
|
||||
private boolean verbose;
|
||||
|
||||
@Override
|
||||
public void internalRun() throws Exception {
|
||||
if (curationClientOptions == CurationClientOptions.HELP) {
|
||||
printHelp();
|
||||
return;
|
||||
}
|
||||
|
||||
Curator curator = initCurator();
|
||||
|
||||
// load curation tasks
|
||||
if (curationClientOptions == CurationClientOptions.TASK) {
|
||||
long start = System.currentTimeMillis();
|
||||
handleCurationTask(curator);
|
||||
this.endScript(start);
|
||||
}
|
||||
|
||||
// process task queue
|
||||
if (curationClientOptions == CurationClientOptions.QUEUE) {
|
||||
// process the task queue
|
||||
TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
|
||||
.getSinglePlugin(TaskQueue.class);
|
||||
if (taskQueue == null) {
|
||||
super.handler.logError("No implementation configured for queue");
|
||||
throw new UnsupportedOperationException("No queue service available");
|
||||
}
|
||||
long timeRun = this.runQueue(taskQueue, curator);
|
||||
this.endScript(timeRun);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does the curation task (-t) or the task in the given file (-T).
|
||||
* Checks:
|
||||
* - if required option -i is missing.
|
||||
* - if option -t has a valid task option
|
||||
*/
|
||||
private void handleCurationTask(Curator curator) throws IOException, SQLException {
|
||||
String taskName;
|
||||
if (commandLine.hasOption('t')) {
|
||||
if (verbose) {
|
||||
handler.logInfo("Adding task: " + this.task);
|
||||
}
|
||||
curator.addTask(this.task);
|
||||
if (verbose && !curator.hasTask(this.task)) {
|
||||
handler.logInfo("Task: " + this.task + " not resolved");
|
||||
}
|
||||
} else if (commandLine.hasOption('T')) {
|
||||
// load taskFile
|
||||
BufferedReader reader = null;
|
||||
try {
|
||||
reader = new BufferedReader(new FileReader(this.taskFile));
|
||||
while ((taskName = reader.readLine()) != null) {
|
||||
if (verbose) {
|
||||
super.handler.logInfo("Adding task: " + taskName);
|
||||
}
|
||||
curator.addTask(taskName);
|
||||
}
|
||||
} finally {
|
||||
if (reader != null) {
|
||||
reader.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
// run tasks against object
|
||||
if (verbose) {
|
||||
super.handler.logInfo("Starting curation");
|
||||
super.handler.logInfo("Curating id: " + this.id);
|
||||
}
|
||||
if ("all".equals(this.id)) {
|
||||
// run on whole Site
|
||||
curator.curate(context,
|
||||
ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
|
||||
} else {
|
||||
curator.curate(context, this.id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs task queue (-q set)
|
||||
*
|
||||
* @param queue The task queue
|
||||
* @param curator The curator
|
||||
* @return Time when queue started
|
||||
*/
|
||||
private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
|
||||
// use current time as our reader 'ticket'
|
||||
long ticket = System.currentTimeMillis();
|
||||
Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
|
||||
while (entryIter.hasNext()) {
|
||||
TaskQueueEntry entry = entryIter.next();
|
||||
if (verbose) {
|
||||
super.handler.logInfo("Curating id: " + entry.getObjectId());
|
||||
}
|
||||
curator.clear();
|
||||
// does entry relate to a DSO or workflow object?
|
||||
if (entry.getObjectId().indexOf('/') > 0) {
|
||||
for (String taskName : entry.getTaskNames()) {
|
||||
curator.addTask(taskName);
|
||||
}
|
||||
curator.curate(context, entry.getObjectId());
|
||||
} else {
|
||||
// make eperson who queued task the effective user
|
||||
EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
|
||||
if (agent != null) {
|
||||
context.setCurrentUser(agent);
|
||||
}
|
||||
CurateServiceFactory.getInstance().getWorkflowCuratorService()
|
||||
.curate(curator, context, entry.getObjectId());
|
||||
}
|
||||
}
|
||||
queue.release(this.queue, ticket, true);
|
||||
return ticket;
|
||||
}
|
||||
|
||||
/**
|
||||
* End of curation script; logs script time if -v verbose is set
|
||||
*
|
||||
* @param timeRun Time script was started
|
||||
* @throws SQLException If DSpace contextx can't complete
|
||||
*/
|
||||
private void endScript(long timeRun) throws SQLException {
|
||||
context.complete();
|
||||
if (verbose) {
|
||||
long elapsed = System.currentTimeMillis() - timeRun;
|
||||
this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the curator with command line variables
|
||||
*
|
||||
* @return Initialised curator
|
||||
* @throws FileNotFoundException If file of command line variable -r reporter is not found
|
||||
*/
|
||||
private Curator initCurator() throws FileNotFoundException {
|
||||
Curator curator = new Curator();
|
||||
OutputStream reporterStream;
|
||||
if (null == this.reporter) {
|
||||
reporterStream = new NullOutputStream();
|
||||
} else if ("-".equals(this.reporter)) {
|
||||
reporterStream = System.out;
|
||||
} else {
|
||||
reporterStream = new PrintStream(this.reporter);
|
||||
}
|
||||
Writer reportWriter = new OutputStreamWriter(reporterStream);
|
        curator.setReporter(reportWriter);

        if (this.scope != null) {
            Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
            curator.setTransactionScope(txScope);
        }

        curator.addParameters(parameters);
        // we are operating in batch mode, if anyone cares.
        curator.setInvoked(Curator.Invoked.BATCH);
        return curator;
    }

    @Override
    public void printHelp() {
        super.printHelp();
        super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
        super.handler.logInfo("single item: CurationCli -t generate -i itemId");
        super.handler.logInfo("task queue: CurationCli -q monthly");
    }

    @Override
    public CurationScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
    }

    @Override
    public void setup() throws ParseException {
        assignCurrentUserInContext();
        this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);

        if (this.curationClientOptions != null) {
            this.initGeneralLineOptionsAndCheckIfValid();
            if (curationClientOptions == CurationClientOptions.TASK) {
                this.initTaskLineOptionsAndCheckIfValid();
            } else if (curationClientOptions == CurationClientOptions.QUEUE) {
                this.queue = this.commandLine.getOptionValue('q');
            }
        } else {
            throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
                " specified");
        }
    }

    /**
     * This method will assign the currentUser to the {@link Context} variable, which is also created in this method.
     * The instance of the method in this class fetches the EPerson identifier that was given to this class upon
     * instantiation, uses it to find the {@link EPerson} associated with it, and sets that {@link EPerson} as the
     * currentUser of the created {@link Context}.
     * @throws ParseException If something went wrong with the retrieval of the EPerson identifier
     */
    protected void assignCurrentUserInContext() throws ParseException {
        UUID currentUserUuid = this.getEpersonIdentifier();
        try {
            this.context = new Context(Context.Mode.BATCH_EDIT);
            EPerson eperson = ePersonService.find(context, currentUserUuid);
            if (eperson == null) {
                super.handler.logError("EPerson not found: " + currentUserUuid);
                throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
            }
            this.context.setCurrentUser(eperson);
        } catch (SQLException e) {
            handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
        }
    }

    /**
     * Fills in some optional command line options.
     * Checks if there are missing required options or invalid values for options.
     */
    private void initGeneralLineOptionsAndCheckIfValid() {
        // report file
        if (this.commandLine.hasOption('r')) {
            this.reporter = this.commandLine.getOptionValue('r');
        }

        // parameters
        this.parameters = new HashMap<>();
        if (this.commandLine.hasOption('p')) {
            for (String parameter : this.commandLine.getOptionValues('p')) {
                String[] parts = parameter.split("=", 2);
                String name = parts[0].trim();
                String value;
                if (parts.length > 1) {
                    value = parts[1].trim();
                } else {
                    value = "true";
                }
                this.parameters.put(name, value);
            }
        }

        // verbose
        verbose = false;
        if (commandLine.hasOption('v')) {
            verbose = true;
        }

        // scope
        if (this.commandLine.getOptionValue('s') != null) {
            this.scope = this.commandLine.getOptionValue('s');
            if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
                this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                    "'open' recognized");
                throw new IllegalArgumentException(
                    "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                        "'open' recognized");
            }
        }
    }

    /**
     * Fills in required command line options for the task or taskFile option.
     * Checks if the required -i option is missing and if -i is either 'all' or a valid dso handle.
     * Checks if -t task has a valid task option.
     * Checks if -T taskfile is a valid file.
     */
    private void initTaskLineOptionsAndCheckIfValid() {
        // task or taskFile
        if (this.commandLine.hasOption('t')) {
            this.task = this.commandLine.getOptionValue('t');
            if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
                super.handler
                    .logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
                throw new IllegalArgumentException(
                    "-t task must be one of: " + CurationClientOptions.getTaskOptions());
            }
        } else if (this.commandLine.hasOption('T')) {
            this.taskFile = this.commandLine.getOptionValue('T');
            if (!(new File(this.taskFile).isFile())) {
                super.handler
                    .logError("-T taskFile must be valid file: " + this.taskFile);
                throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
            }
        }

        if (this.commandLine.hasOption('i')) {
            this.id = this.commandLine.getOptionValue('i').toLowerCase();
            if (!this.id.equalsIgnoreCase("all")) {
                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
                DSpaceObject dso;
                try {
                    dso = handleService.resolveToObject(this.context, id);
                } catch (SQLException e) {
                    super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
                    throw new IllegalArgumentException(
                        "SQLException trying to resolve handle " + id + " to a valid dso");
                }
                if (dso == null) {
                    super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
                        "not be resolved to valid dso handle");
                    throw new IllegalArgumentException(
                        "Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
                            "not be resolved to valid dso handle");
                }
            }
        } else {
            super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
                "help)");
            throw new IllegalArgumentException(
                "Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
                    "help)");
        }
    }
}
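The -p handling in initGeneralLineOptionsAndCheckIfValid() above splits each parameter on the first '=' and treats a bare name as a boolean flag. A minimal standalone sketch of that rule follows; the class name and main harness are illustrative only, not part of DSpace:

    import java.util.HashMap;
    import java.util.Map;

    public class ParameterParseSketch {
        // Same split-on-first-'=' rule as the -p option handling above.
        static Map<String, String> parse(String[] rawParameters) {
            Map<String, String> parameters = new HashMap<>();
            for (String parameter : rawParameters) {
                String[] parts = parameter.split("=", 2); // limit 2: the value may itself contain '='
                String value = parts.length > 1 ? parts[1].trim() : "true"; // bare name acts as a flag
                parameters.put(parts[0].trim(), value);
            }
            return parameters;
        }

        public static void main(String[] args) {
            // prints {dryrun=true, depth=2, expr=a=b} (map order may vary)
            System.out.println(parse(new String[] {"dryrun", "depth=2", "expr=a=b"}));
        }
    }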
@@ -7,229 +7,27 @@
 */
package org.dspace.curate;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.apache.commons.io.output.NullOutputStream;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.apache.commons.cli.ParseException;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.curate.factory.CurateServiceFactory;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.utils.DSpace;

/**
 * CurationCli provides command-line access to Curation tools and processes.
 *
 * @author richardrodgers
 * This is the CLI version of the {@link Curation} script.
 * This will only be called when the curate script is called from a commandline instance.
 */
public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {

    private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();

    private Context context;
    private CurationClientOptions curationClientOptions;

    private String task;
    private String taskFile;
    private String id;
    private String queue;
    private String scope;
    private String reporter;
    private Map<String, String> parameters;
    private boolean verbose;

    @Override
    public void internalRun() throws Exception {
        if (curationClientOptions == CurationClientOptions.HELP) {
            printHelp();
            return;
        }

        Curator curator = initCurator();

        // load curation tasks
        if (curationClientOptions == CurationClientOptions.TASK) {
            long start = System.currentTimeMillis();
            handleCurationTask(curator);
            this.endScript(start);
        }

        // process task queue
        if (curationClientOptions == CurationClientOptions.QUEUE) {
            // process the task queue
            TaskQueue taskQueue = (TaskQueue) CoreServiceFactory.getInstance().getPluginService()
                .getSinglePlugin(TaskQueue.class);
            if (taskQueue == null) {
                super.handler.logError("No implementation configured for queue");
                throw new UnsupportedOperationException("No queue service available");
            }
            long timeRun = this.runQueue(taskQueue, curator);
            this.endScript(timeRun);
        }
    }
public class CurationCli extends Curation {

    /**
     * Does the curation task (-t) or the task in the given file (-T).
     * Checks:
     * - if required option -i is missing.
     * - if option -t has a valid task option
     * This is the overridden instance of the {@link Curation#assignCurrentUserInContext()} method in the parent class
     * {@link Curation}.
     * This is done so that the CLI version of the script is able to retrieve its currentUser from the -e flag given
     * with the parameters of the script.
     * @throws ParseException If the -e flag was not given in the parameters when calling the script
     */
    private void handleCurationTask(Curator curator) throws IOException, SQLException {
        String taskName;
        if (commandLine.hasOption('t')) {
            if (verbose) {
                handler.logInfo("Adding task: " + this.task);
            }
            curator.addTask(this.task);
            if (verbose && !curator.hasTask(this.task)) {
                handler.logInfo("Task: " + this.task + " not resolved");
            }
        } else if (commandLine.hasOption('T')) {
            // load taskFile
            BufferedReader reader = null;
            try {
                reader = new BufferedReader(new FileReader(this.taskFile));
                while ((taskName = reader.readLine()) != null) {
                    if (verbose) {
                        super.handler.logInfo("Adding task: " + taskName);
                    }
                    curator.addTask(taskName);
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
        }
        // run tasks against object
        if (verbose) {
            super.handler.logInfo("Starting curation");
            super.handler.logInfo("Curating id: " + this.id);
        }
        if ("all".equals(this.id)) {
            // run on whole Site
            curator.curate(context,
                ContentServiceFactory.getInstance().getSiteService().findSite(context).getHandle());
        } else {
            curator.curate(context, this.id);
        }
    }

    /**
     * Runs task queue (-q set)
     *
     * @param queue   The task queue
     * @param curator The curator
     * @return Time when queue started
     */
    private long runQueue(TaskQueue queue, Curator curator) throws SQLException, AuthorizeException, IOException {
        // use current time as our reader 'ticket'
        long ticket = System.currentTimeMillis();
        Iterator<TaskQueueEntry> entryIter = queue.dequeue(this.queue, ticket).iterator();
        while (entryIter.hasNext()) {
            TaskQueueEntry entry = entryIter.next();
            if (verbose) {
                super.handler.logInfo("Curating id: " + entry.getObjectId());
            }
            curator.clear();
            // does entry relate to a DSO or workflow object?
            if (entry.getObjectId().indexOf('/') > 0) {
                for (String taskName : entry.getTaskNames()) {
                    curator.addTask(taskName);
                }
                curator.curate(context, entry.getObjectId());
            } else {
                // make eperson who queued task the effective user
                EPerson agent = ePersonService.findByEmail(context, entry.getEpersonId());
                if (agent != null) {
                    context.setCurrentUser(agent);
                }
                CurateServiceFactory.getInstance().getWorkflowCuratorService()
                    .curate(curator, context, entry.getObjectId());
            }
        }
        queue.release(this.queue, ticket, true);
        return ticket;
    }

    /**
     * End of curation script; logs script time if -v verbose is set
     *
     * @param timeRun Time script was started
     * @throws SQLException If the DSpace context can't complete
     */
    private void endScript(long timeRun) throws SQLException {
        context.complete();
        if (verbose) {
            long elapsed = System.currentTimeMillis() - timeRun;
            this.handler.logInfo("Ending curation. Elapsed time: " + elapsed);
        }
    }

    /**
     * Initialize the curator with command line variables
     *
     * @return Initialised curator
     * @throws FileNotFoundException If the file given for the -r reporter option is not found
     */
    private Curator initCurator() throws FileNotFoundException {
        Curator curator = new Curator();
        OutputStream reporterStream;
        if (null == this.reporter) {
            reporterStream = new NullOutputStream();
        } else if ("-".equals(this.reporter)) {
            reporterStream = System.out;
        } else {
            reporterStream = new PrintStream(this.reporter);
        }
        Writer reportWriter = new OutputStreamWriter(reporterStream);
        curator.setReporter(reportWriter);

        if (this.scope != null) {
            Curator.TxScope txScope = Curator.TxScope.valueOf(this.scope.toUpperCase());
            curator.setTransactionScope(txScope);
        }

        curator.addParameters(parameters);
        // we are operating in batch mode, if anyone cares.
        curator.setInvoked(Curator.Invoked.BATCH);
        return curator;
    }

    @Override
    public void printHelp() {
        super.printHelp();
        super.handler.logInfo("\nwhole repo: CurationCli -t estimate -i all");
        super.handler.logInfo("single item: CurationCli -t generate -i itemId");
        super.handler.logInfo("task queue: CurationCli -q monthly");
    }

    @Override
    public CurationScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager().getServiceByName("curate", CurationScriptConfiguration.class);
    }

    @Override
    public void setup() {
    protected void assignCurrentUserInContext() throws ParseException {
        if (this.commandLine.hasOption('e')) {
            String ePersonEmail = this.commandLine.getOptionValue('e');
            this.context = new Context(Context.Mode.BATCH_EDIT);
@@ -244,119 +42,7 @@ public class CurationCli extends DSpaceRunnable<CurationScriptConfiguration> {
                throw new IllegalArgumentException("SQLException trying to find user with email: " + ePersonEmail);
            }
        } else {
            throw new IllegalArgumentException("Needs an -e to set eperson (admin)");
        }
        this.curationClientOptions = CurationClientOptions.getClientOption(commandLine);

        if (this.curationClientOptions != null) {
            this.initGeneralLineOptionsAndCheckIfValid();
            if (curationClientOptions == CurationClientOptions.TASK) {
                this.initTaskLineOptionsAndCheckIfValid();
            } else if (curationClientOptions == CurationClientOptions.QUEUE) {
                this.queue = this.commandLine.getOptionValue('q');
            }
        } else {
            throw new IllegalArgumentException("[--help || --task|--taskfile <> -identifier <> || -queue <> ] must be" +
                " specified");
        }
    }

    /**
     * Fills in some optional command line options.
     * Checks if there are missing required options or invalid values for options.
     */
    private void initGeneralLineOptionsAndCheckIfValid() {
        // report file
        if (this.commandLine.hasOption('r')) {
            this.reporter = this.commandLine.getOptionValue('r');
        }

        // parameters
        this.parameters = new HashMap<>();
        if (this.commandLine.hasOption('p')) {
            for (String parameter : this.commandLine.getOptionValues('p')) {
                String[] parts = parameter.split("=", 2);
                String name = parts[0].trim();
                String value;
                if (parts.length > 1) {
                    value = parts[1].trim();
                } else {
                    value = "true";
                }
                this.parameters.put(name, value);
            }
        }

        // verbose
        verbose = false;
        if (commandLine.hasOption('v')) {
            verbose = true;
        }

        // scope
        if (this.commandLine.getOptionValue('s') != null) {
            this.scope = this.commandLine.getOptionValue('s');
            if (this.scope != null && Curator.TxScope.valueOf(this.scope.toUpperCase()) == null) {
                this.handler.logError("Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                    "'open' recognized");
                throw new IllegalArgumentException(
                    "Bad transaction scope '" + this.scope + "': only 'object', 'curation' or " +
                        "'open' recognized");
            }
        }
    }

    /**
     * Fills in required command line options for the task or taskFile option.
     * Checks if the required -i option is missing and if -i is either 'all' or a valid dso handle.
     * Checks if -t task has a valid task option.
     * Checks if -T taskfile is a valid file.
     */
    private void initTaskLineOptionsAndCheckIfValid() {
        // task or taskFile
        if (this.commandLine.hasOption('t')) {
            this.task = this.commandLine.getOptionValue('t');
            if (!CurationClientOptions.getTaskOptions().contains(this.task)) {
                super.handler
                    .logError("-t task must be one of: " + CurationClientOptions.getTaskOptions());
                throw new IllegalArgumentException(
                    "-t task must be one of: " + CurationClientOptions.getTaskOptions());
            }
        } else if (this.commandLine.hasOption('T')) {
            this.taskFile = this.commandLine.getOptionValue('T');
            if (!(new File(this.taskFile).isFile())) {
                super.handler
                    .logError("-T taskFile must be valid file: " + this.taskFile);
                throw new IllegalArgumentException("-T taskFile must be valid file: " + this.taskFile);
            }
        }

        if (this.commandLine.hasOption('i')) {
            this.id = this.commandLine.getOptionValue('i').toLowerCase();
            if (!this.id.equalsIgnoreCase("all")) {
                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
                DSpaceObject dso;
                try {
                    dso = handleService.resolveToObject(this.context, id);
                } catch (SQLException e) {
                    super.handler.logError("SQLException trying to resolve handle " + id + " to a valid dso");
                    throw new IllegalArgumentException(
                        "SQLException trying to resolve handle " + id + " to a valid dso");
                }
                if (dso == null) {
                    super.handler.logError("Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
                        "not be resolved to valid dso handle");
                    throw new IllegalArgumentException(
                        "Id must be specified: a valid dso handle or 'all'; " + this.id + " could " +
                            "not be resolved to valid dso handle");
                }
            }
        } else {
            super.handler.logError("Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
                "help)");
            throw new IllegalArgumentException(
                "Id must be specified: a handle, 'all', or no -i and a -q task queue (-h for " +
                    "help)");
            throw new ParseException("Required parameter -e missing!");
        }
    }
}
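initCurator() above picks the report destination from the -r value: absent means discard, '-' means console, anything else is treated as a file path. A small sketch of that three-way choice, assuming the JDK's OutputStream.nullOutputStream() (Java 11+) as a stand-in for Commons IO's NullOutputStream:

    import java.io.FileNotFoundException;
    import java.io.OutputStream;
    import java.io.PrintStream;

    public class ReporterStreamSketch {
        // null -> discard, "-" -> stdout, otherwise -> file path, as in initCurator() above
        static OutputStream select(String reporter) throws FileNotFoundException {
            if (reporter == null) {
                return OutputStream.nullOutputStream();
            } else if ("-".equals(reporter)) {
                return System.out;
            }
            return new PrintStream(reporter); // may throw FileNotFoundException for an unwritable path
        }
    }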
@@ -0,0 +1,26 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.curate;

import org.apache.commons.cli.Options;

/**
 * This is the CLI version of the {@link CurationScriptConfiguration} class that handles the configuration for the
 * {@link CurationCli} script
 */
public class CurationCliScriptConfiguration extends CurationScriptConfiguration<Curation> {

    @Override
    public Options getOptions() {
        options = super.getOptions();
        options.addOption("e", "eperson", true, "email address of curating eperson");
        options.getOption("e").setType(String.class);
        options.getOption("e").setRequired(true);
        return options;
    }
}
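Because -e is marked required here, Commons CLI itself rejects a command line that omits it. A hedged sketch of that behavior using the library's standard DefaultParser, with the option set trimmed down to -e only:

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class RequiredOptionSketch {
        public static void main(String[] args) {
            Options options = new Options();
            options.addOption("e", "eperson", true, "email address of curating eperson");
            options.getOption("e").setRequired(true);
            try {
                // Passing an empty array here instead would throw a MissingOptionException.
                CommandLine cmd = new DefaultParser().parse(options, new String[] {"-e", "admin@example.com"});
                System.out.println("eperson = " + cmd.getOptionValue('e'));
            } catch (ParseException e) {
                System.err.println("Missing required -e: " + e.getMessage());
            }
        }
    }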
@@ -45,6 +45,11 @@ public enum CurationClientOptions {
        return null;
    }

    /**
     * This method will create all the possible Options for the {@link Curation} script.
     * This will be used by {@link CurationScriptConfiguration}
     * @return The options for the {@link Curation} script
     */
    protected static Options constructOptions() {
        Options options = new Options();

@@ -54,7 +59,6 @@ public enum CurationClientOptions {
            "Id (handle) of object to perform task on, or 'all' to perform on whole repository");
        options.addOption("p", "parameter", true, "a task parameter 'NAME=VALUE'");
        options.addOption("q", "queue", true, "name of task queue to process");
        options.addOption("e", "eperson", true, "email address of curating eperson");
        options.addOption("r", "reporter", true,
            "relative or absolute path to the desired report file. Use '-' to report to console. If absent, no " +
                "reporting");
@@ -16,11 +16,11 @@ import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * The {@link ScriptConfiguration} for the {@link CurationCli} script
 * The {@link ScriptConfiguration} for the {@link Curation} script
 *
 * @author Maria Verdonck (Atmire) on 23/06/2020
 */
public class CurationScriptConfiguration<T extends CurationCli> extends ScriptConfiguration<T> {
public class CurationScriptConfiguration<T extends Curation> extends ScriptConfiguration<T> {

    @Autowired
    private AuthorizeService authorizeService;
@@ -8,6 +8,7 @@
package org.dspace.discovery;

import java.util.HashSet;
import java.util.Optional;
import java.util.Set;

import org.apache.logging.log4j.Logger;
@@ -15,6 +16,7 @@ import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
@@ -67,7 +69,7 @@ public class IndexEventConsumer implements Consumer {

        int st = event.getSubjectType();
        if (!(st == Constants.ITEM || st == Constants.BUNDLE
            || st == Constants.COLLECTION || st == Constants.COMMUNITY)) {
            || st == Constants.COLLECTION || st == Constants.COMMUNITY || st == Constants.SITE)) {
            log
                .warn("IndexConsumer should not have been given this kind of Subject in an event, skipping: "
                    + event.toString());
@@ -104,10 +106,28 @@ public class IndexEventConsumer implements Consumer {
            case Event.MODIFY:
            case Event.MODIFY_METADATA:
                if (subject == null) {
                    log.warn(event.getEventTypeAsString() + " event, could not get object for "
                    if (st == Constants.SITE) {
                        // Update the indexable objects of type in event.detail of objects with ids in event.identifiers
                        for (String id : event.getIdentifiers()) {
                            IndexFactory indexableObjectService = IndexObjectFactoryFactory.getInstance().
                                getIndexFactoryByType(event.getDetail());
                            Optional<IndexableObject> indexableObject = Optional.empty();
                            indexableObject = indexableObjectService.findIndexableObject(ctx, id);
                            if (indexableObject.isPresent()) {
                                log.debug("consume() adding event to update queue: " + event.toString());
                                objectsToUpdate
                                    .addAll(indexObjectServiceFactory
                                        .getIndexableObjects(ctx, indexableObject.get().getIndexedObject()));
                            } else {
                                log.warn("Cannot resolve " + id);
                            }
                        }
                    } else {
                        log.warn(event.getEventTypeAsString() + " event, could not get object for "
                            + event.getSubjectTypeAsString() + " id="
                            + event.getSubjectID()
                            + ", perhaps it has been deleted.");
                    }
                } else {
                    log.debug("consume() adding event to update queue: " + event.toString());
                    objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
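The SITE branch above follows a resolve-or-warn pattern per identifier: look the id up, queue the result if present, log a warning otherwise. A generic sketch of that shape reduced to plain JDK types (the lookup itself is hypothetical; DSpace resolves through IndexObjectFactoryFactory as shown in the diff):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Optional;

    public class OptionalResolveSketch {
        // Stand-in lookup: only "42" resolves.
        static Optional<String> find(String id) {
            return "42".equals(id) ? Optional.of("object-42") : Optional.empty();
        }

        public static void main(String[] args) {
            List<String> objectsToUpdate = new ArrayList<>();
            for (String id : new String[] {"42", "unknown"}) {
                // queue if resolved, warn if not, as in the consumer loop above
                find(id).ifPresentOrElse(
                    objectsToUpdate::add,
                    () -> System.err.println("Cannot resolve " + id));
            }
            System.out.println(objectsToUpdate); // [object-42]
        }
    }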
@@ -12,6 +12,7 @@ import java.sql.SQLException;
import java.util.Date;
import java.util.List;

import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
@@ -56,7 +57,7 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
        doc.addField(SearchUtils.RESOURCE_ID_FIELD, indexableObject.getID().toString());

        //Do any additional indexing, depends on the plugins
        for (SolrServiceIndexPlugin solrServiceIndexPlugin : solrServiceIndexPlugins) {
        for (SolrServiceIndexPlugin solrServiceIndexPlugin : ListUtils.emptyIfNull(solrServiceIndexPlugins)) {
            solrServiceIndexPlugin.additionalIndex(context, indexableObject, doc);
        }

@@ -190,4 +191,4 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
    public void deleteAll() throws IOException, SolrServerException {
        solrSearchCore.getSolr().deleteByQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + getType());
    }
}
}
@@ -0,0 +1,51 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.discovery.indexobject;

import org.dspace.content.MetadataField;
import org.dspace.discovery.IndexableObject;

/**
 * {@link MetadataField} implementation for the {@link IndexableObject}
 *
 * @author Maria Verdonck (Atmire) on 14/07/2020
 */
public class IndexableMetadataField implements IndexableObject<MetadataField, Integer> {

    private MetadataField metadataField;
    public static final String TYPE = MetadataField.class.getSimpleName();

    public IndexableMetadataField(MetadataField metadataField) {
        this.metadataField = metadataField;
    }

    @Override
    public String getType() {
        return TYPE;
    }

    @Override
    public Integer getID() {
        return this.metadataField.getID();
    }

    @Override
    public MetadataField getIndexedObject() {
        return this.metadataField;
    }

    @Override
    public void setIndexedObject(MetadataField metadataField) {
        this.metadataField = metadataField;
    }

    @Override
    public String getTypeText() {
        return TYPE.toUpperCase();
    }
}
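IndexableMetadataField is a thin adapter: it exposes a stable type label plus the wrapped entity and its ID. The same shape reduced to plain JDK types, with String standing in for MetadataField (purely illustrative):

    public class WrapperSketch {
        interface Indexable<T, ID> {
            T getIndexedObject();
            ID getID();
            String getType();
        }

        static class StringIndexable implements Indexable<String, Integer> {
            private final String value;
            StringIndexable(String value) { this.value = value; }
            public String getIndexedObject() { return value; }
            public Integer getID() { return value.hashCode(); } // stand-in for a database ID
            public String getType() { return "String"; }
        }

        public static void main(String[] args) {
            Indexable<String, Integer> idx = new StringIndexable("dc.title");
            System.out.println(idx.getType() + " #" + idx.getID() + " -> " + idx.getIndexedObject());
        }
    }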
@@ -0,0 +1,109 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.discovery.indexobject;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

import org.apache.commons.lang3.StringUtils;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.MetadataField;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.core.Context;
import org.dspace.discovery.indexobject.factory.MetadataFieldIndexFactory;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * Factory implementation for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
 *
 * @author Maria Verdonck (Atmire) on 14/07/2020
 */
public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl<IndexableMetadataField, MetadataField>
    implements MetadataFieldIndexFactory {

    public static final String SCHEMA_FIELD_NAME = "schema";
    public static final String ELEMENT_FIELD_NAME = "element";
    public static final String QUALIFIER_FIELD_NAME = "qualifier";
    public static final String FIELD_NAME_VARIATIONS = "fieldName";

    protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();

    @Override
    public SolrInputDocument buildDocument(Context context, IndexableMetadataField indexableObject) throws SQLException,
        IOException {
        // Add the IDs, types and call the SolrServiceIndexPlugins
        final SolrInputDocument doc = super.buildDocument(context, indexableObject);
        final MetadataField metadataField = indexableObject.getIndexedObject();
        // add schema, element, qualifier and full fieldName
        addFacetIndex(doc, SCHEMA_FIELD_NAME, metadataField.getMetadataSchema().getName(),
            metadataField.getMetadataSchema().getName());
        addFacetIndex(doc, ELEMENT_FIELD_NAME, metadataField.getElement(), metadataField.getElement());
        String fieldName = metadataField.toString().replace('_', '.');
        addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName, fieldName);
        if (StringUtils.isNotBlank(metadataField.getQualifier())) {
            addFacetIndex(doc, QUALIFIER_FIELD_NAME, metadataField.getQualifier(), metadataField.getQualifier());
            addFacetIndex(doc, FIELD_NAME_VARIATIONS, fieldName,
                metadataField.getElement() + "." + metadataField.getQualifier());
            addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getQualifier(), metadataField.getQualifier());
        } else {
            addFacetIndex(doc, FIELD_NAME_VARIATIONS, metadataField.getElement(), metadataField.getElement());
        }
        addNamedResourceTypeIndex(doc, indexableObject.getTypeText());
        Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
        // add read permission on doc for anonymous group
        doc.addField("read", "g" + anonymousGroup.getID());
        return doc;
    }

    @Autowired
    private MetadataFieldService metadataFieldService;

    @Override
    public Iterator<IndexableMetadataField> findAll(Context context) throws SQLException {
        final Iterator<MetadataField> metadataFields = metadataFieldService.findAll(context).iterator();
        return new Iterator<>() {
            @Override
            public boolean hasNext() {
                return metadataFields.hasNext();
            }

            @Override
            public IndexableMetadataField next() {
                return new IndexableMetadataField(metadataFields.next());
            }
        };
    }

    @Override
    public String getType() {
        return IndexableMetadataField.TYPE;
    }

    @Override
    public Optional<IndexableMetadataField> findIndexableObject(Context context, String id) throws SQLException {
        final MetadataField metadataField = metadataFieldService.find(context, Integer.parseInt(id));
        return metadataField == null ? Optional.empty() : Optional.of(new IndexableMetadataField(metadataField));
    }

    @Override
    public boolean supports(Object object) {
        return object instanceof MetadataField;
    }

    @Override
    public List getIndexableObjects(Context context, MetadataField object) {
        return Arrays.asList(new IndexableMetadataField(object));
    }
}
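buildDocument above derives the dotted field name from MetadataField.toString() (underscores become dots) and indexes several search variations: the full name, element.qualifier, and the bare qualifier or element. A sketch of the variation set for a hypothetical dc_contributor_author field, reduced to plain strings:

    import java.util.ArrayList;
    import java.util.List;

    public class FieldNameVariationsSketch {
        // Mirrors the variation logic in buildDocument above.
        static List<String> variations(String underscored, String element, String qualifier) {
            List<String> out = new ArrayList<>();
            String fieldName = underscored.replace('_', '.');
            out.add(fieldName);
            if (qualifier != null && !qualifier.isBlank()) {
                out.add(element + "." + qualifier);
                out.add(qualifier);
            } else {
                out.add(element);
            }
            return out;
        }

        public static void main(String[] args) {
            // prints [dc.contributor.author, contributor.author, author]
            System.out.println(variations("dc_contributor_author", "contributor", "author"));
        }
    }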
@@ -0,0 +1,19 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.discovery.indexobject.factory;

import org.dspace.content.MetadataField;
import org.dspace.discovery.indexobject.IndexableMetadataField;

/**
 * Factory interface for indexing/retrieving {@link org.dspace.content.MetadataField} items in the search core
 *
 * @author Maria Verdonck (Atmire) on 14/07/2020
 */
public interface MetadataFieldIndexFactory extends IndexFactory<IndexableMetadataField, MetadataField> {
}
@@ -141,7 +141,7 @@ public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport {
            return false;
        }
        final EPerson other = (EPerson) obj;
        if (this.getID() != other.getID()) {
        if (!this.getID().equals(other.getID())) {
            return false;
        }
        if (!StringUtils.equals(this.getEmail(), other.getEmail())) {

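The equals() fix above matters because EPerson IDs are UUID objects: != compares references, so two equal UUIDs loaded separately would wrongly be treated as different. A two-line demonstration:

    import java.util.UUID;

    public class UuidEqualitySketch {
        public static void main(String[] args) {
            UUID a = UUID.fromString("123e4567-e89b-12d3-a456-426614174000");
            UUID b = UUID.fromString("123e4567-e89b-12d3-a456-426614174000");
            System.out.println(a != b);      // true: distinct objects, reference comparison
            System.out.println(a.equals(b)); // true: same value, which is what equals() should use
        }
    }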
162
dspace-api/src/main/java/org/dspace/external/provider/impl/LiveImportDataProvider.java
vendored
Normal file
@@ -0,0 +1,162 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.external.provider.impl;

import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;
import org.dspace.content.dto.MetadataValueDTO;
import org.dspace.external.model.ExternalDataObject;
import org.dspace.external.provider.ExternalDataProvider;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.components.QuerySource;

/**
 * This class allows a Live Import Provider to be configured as an External Data Provider
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 *
 */
public class LiveImportDataProvider implements ExternalDataProvider {
    /**
     * The {@link QuerySource} live import provider
     */
    private QuerySource querySource;

    /**
     * A unique human-readable identifier for this provider
     */
    private String sourceIdentifier;

    private String recordIdMetadata;

    private String displayMetadata = "dc.title";

    @Override
    public String getSourceIdentifier() {
        return sourceIdentifier;
    }

    /**
     * This method sets the SourceIdentifier for the ExternalDataProvider
     * @param sourceIdentifier The UNIQUE sourceIdentifier to be set on any LiveImport data provider
     */
    public void setSourceIdentifier(String sourceIdentifier) {
        this.sourceIdentifier = sourceIdentifier;
    }

    /**
     * This method sets the MetadataSource for the ExternalDataProvider
     * @param querySource {@link org.dspace.importer.external.service.components.MetadataSource} implementation used to process the input data
     */
    public void setMetadataSource(QuerySource querySource) {
        this.querySource = querySource;
    }

    /**
     * This method sets the dublin core identifier to use as the metadata id
     * @param recordIdMetadata dublin core identifier to use as the metadata id
     */
    public void setRecordIdMetadata(String recordIdMetadata) {
        this.recordIdMetadata = recordIdMetadata;
    }

    /**
     * This method sets the dublin core identifier used to display the title
     * @param displayMetadata metadata to use as the title
     */
    public void setDisplayMetadata(String displayMetadata) {
        this.displayMetadata = displayMetadata;
    }

    @Override
    public Optional<ExternalDataObject> getExternalDataObject(String id) {
        try {
            ExternalDataObject externalDataObject = getExternalDataObject(querySource.getRecord(id));
            return Optional.of(externalDataObject);
        } catch (MetadataSourceException e) {
            throw new RuntimeException(
                "The live import provider " + querySource.getImportSource() + " throws an exception", e);
        }
    }

    @Override
    public List<ExternalDataObject> searchExternalDataObjects(String query, int start, int limit) {
        Collection<ImportRecord> records;
        try {
            records = querySource.getRecords(query, start, limit);
            return records.stream().map(r -> getExternalDataObject(r)).collect(Collectors.toList());
        } catch (MetadataSourceException e) {
            throw new RuntimeException(
                "The live import provider " + querySource.getImportSource() + " throws an exception", e);
        }
    }

    @Override
    public boolean supports(String source) {
        return StringUtils.equalsIgnoreCase(sourceIdentifier, source);
    }

    @Override
    public int getNumberOfResults(String query) {
        try {
            return querySource.getRecordsCount(query);
        } catch (MetadataSourceException e) {
            throw new RuntimeException(
                "The live import provider " + querySource.getImportSource() + " throws an exception", e);
        }
    }

    /**
     * Internal method to convert an ImportRecord to an ExternalDataObject
     *
     * FIXME it would be useful to remove ImportRecord at all in favor of the
     * ExternalDataObject
     *
     * @param record
     * @return
     */
    private ExternalDataObject getExternalDataObject(ImportRecord record) {
        // return 400 if no records were found
        if (record == null) {
            throw new IllegalArgumentException("No record found for query or id");
        }
        ExternalDataObject externalDataObject = new ExternalDataObject(sourceIdentifier);
        String id = getFirstValue(record, recordIdMetadata);
        String display = getFirstValue(record, displayMetadata);
        externalDataObject.setId(id);
        externalDataObject.setDisplayValue(display);
        externalDataObject.setValue(display);
        for (MetadatumDTO dto : record.getValueList()) {
            // FIXME it would be useful to remove MetadatumDTO in favor of MetadataValueDTO
            MetadataValueDTO mvDTO = new MetadataValueDTO();
            mvDTO.setSchema(dto.getSchema());
            mvDTO.setElement(dto.getElement());
            mvDTO.setQualifier(dto.getQualifier());
            mvDTO.setValue(dto.getValue());
            externalDataObject.addMetadata(mvDTO);
        }
        return externalDataObject;
    }

    private String getFirstValue(ImportRecord record, String metadata) {
        String id = null;
        String[] split = StringUtils.split(metadata, ".", 3);
        Collection<MetadatumDTO> values = record.getValue(split[0], split[1], split.length == 3 ? split[2] : null);
        if (!values.isEmpty()) {
            id = (values.iterator().next().getValue());
        }
        return id;
    }

}
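getFirstValue above splits a dotted metadata name into at most three parts so the qualifier stays optional. A standalone sketch of that split, using the same Commons Lang call as the provider:

    import org.apache.commons.lang3.StringUtils;

    public class MetadataSplitSketch {
        public static void main(String[] args) {
            for (String metadata : new String[] {"dc.title", "dc.identifier.other"}) {
                String[] split = StringUtils.split(metadata, ".", 3); // max 3 parts: schema, element, qualifier
                String qualifier = split.length == 3 ? split[2] : null;
                System.out.println(split[0] + " / " + split[1] + " / " + qualifier);
            }
            // dc / title / null
            // dc / identifier / other
        }
    }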
@@ -761,9 +761,9 @@ public class DOIIdentifierProvider
        Item item = (Item) dso;

        List<MetadataValue> metadata = itemService.getMetadata(item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null);
        String leftPart = DOI.RESOLVER + SLASH + getPrefix() + SLASH + getNamespaceSeparator();
        for (MetadataValue id : metadata) {
            if (id.getValue().startsWith(
                DOI.RESOLVER + String.valueOf(SLASH) + PREFIX + String.valueOf(SLASH) + NAMESPACE_SEPARATOR)) {
            if (id.getValue().startsWith(leftPart)) {
                return doiService.DOIFromExternalFormat(id.getValue());
            }
        }
@@ -0,0 +1,37 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.importer.external.arxiv.metadatamapping;

import java.util.Map;
import javax.annotation.Resource;

import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;

/**
 * An implementation of {@link AbstractMetadataFieldMapping}
 * Responsible for defining the mapping of the ArXiv metadatum fields on the DSpace metadatum fields
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class ArXivFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * which metadata field is generated.
     *
     * @param metadataFieldMap The map containing the link between retrieved metadata and the metadata that will be
     *                         set on the item.
     */
    @Override
    @Resource(name = "arxivMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }

}
@@ -0,0 +1,60 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.importer.external.arxiv.metadatamapping.contributor;

import java.util.Collection;

import org.apache.axiom.om.OMElement;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.metadatamapping.contributor.MetadataContributor;
import org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor;

/**
 * ArXiv specific implementation of {@link MetadataContributor}
 * Responsible for generating the ArXiv Id from the retrieved item.
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 *
 */
public class ArXivIdMetadataContributor extends SimpleXpathMetadatumContributor {

    /**
     * Retrieve the metadata associated with the given object.
     * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO
     * list
     *
     * @param t A class to retrieve metadata from.
     * @return a collection of import records. Only the identifier of the found records may be put in the record.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(OMElement t) {
        Collection<MetadatumDTO> values = super.contributeMetadata(t);
        parseValue(values);
        return values;
    }

    /**
     * ArXiv returns a full URL as the <id> value, e.g. http://arxiv.org/abs/1911.11405v1.
     * This method parses out the identifier from the end of the URL, e.g. 1911.11405v1.
     *
     * @param dtos Metadata which contains the item's uri
     */
    private void parseValue(Collection<MetadatumDTO> dtos) {
        if (dtos != null) {
            for (MetadatumDTO dto : dtos) {
                if (dto != null && dto.getValue() != null && dto.getValue().contains("/")) {
                    int startIndex = dto.getValue().lastIndexOf('/') + 1;
                    int endIndex = dto.getValue().length();
                    String id = dto.getValue().substring(startIndex, endIndex);
                    dto.setValue(id);
                }
            }
        }
    }

}
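parseValue above keeps everything after the last '/' of the Atom <id> URL. The same rule in isolation:

    public class ArxivIdParseSketch {
        // Keep the trailing path segment, e.g. http://arxiv.org/abs/1911.11405v1 -> 1911.11405v1
        static String parse(String value) {
            return value.contains("/") ? value.substring(value.lastIndexOf('/') + 1) : value;
        }

        public static void main(String[] args) {
            System.out.println(parse("http://arxiv.org/abs/1911.11405v1")); // 1911.11405v1
        }
    }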
@@ -0,0 +1,421 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.importer.external.arxiv.service;
|
||||
|
||||
import java.io.StringReader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
import javax.el.MethodNotFoundException;
|
||||
import javax.ws.rs.client.Client;
|
||||
import javax.ws.rs.client.ClientBuilder;
|
||||
import javax.ws.rs.client.Invocation;
|
||||
import javax.ws.rs.client.WebTarget;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
import javax.ws.rs.core.Response;
|
||||
|
||||
import org.apache.axiom.om.OMElement;
|
||||
import org.apache.axiom.om.OMXMLBuilderFactory;
|
||||
import org.apache.axiom.om.OMXMLParserWrapper;
|
||||
import org.apache.axiom.om.xpath.AXIOMXPath;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.importer.external.datamodel.ImportRecord;
|
||||
import org.dspace.importer.external.datamodel.Query;
|
||||
import org.dspace.importer.external.exception.MetadataSourceException;
|
||||
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
|
||||
import org.dspace.importer.external.service.components.QuerySource;
|
||||
import org.jaxen.JaxenException;
|
||||
|
||||
/**
|
||||
* Implements a data source for querying ArXiv
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
|
||||
*
|
||||
*/
|
||||
public class ArXivImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<OMElement>
|
||||
implements QuerySource {
|
||||
|
||||
private WebTarget webTarget;
|
||||
private String baseAddress;
|
||||
|
||||
/**
|
||||
* Find the number of records matching the query string in ArXiv. Supports pagination.
|
||||
*
|
||||
* @param query a query string to base the search on.
|
||||
* @param start offset to start at
|
||||
* @param count number of records to retrieve.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
|
||||
return retry(new SearchByQueryCallable(query, count, start));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find records based on a object query and convert them to a list metadata mapped in ImportRecord.
|
||||
* The entry with the key "query" of the Query's map will be used as query string value.
|
||||
*
|
||||
* @see org.dspace.importer.external.datamodel.Query
|
||||
* @see org.dspace.importer.external.datamodel.ImportRecord
|
||||
* @param query a query object to base the search on.
|
||||
* @return a set of records. Fully transformed.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
|
||||
return retry(new SearchByQueryCallable(query));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the number of records matching the query string in ArXiv;
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return the sum of the matching records over this import source
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getRecordsCount(String query) throws MetadataSourceException {
|
||||
return retry(new CountByQueryCallable(query));
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Find the number of records matching a query;
|
||||
* The entry with the key "query" of the Query's map will be used to get the query string.
|
||||
*
|
||||
* @see org.dspace.importer.external.datamodel.Query
|
||||
* @param query a query string to base the search on.
|
||||
* @return the sum of the matching records over this import source
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public int getRecordsCount(Query query) throws MetadataSourceException {
|
||||
return retry(new CountByQueryCallable(query));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single record of metadata from the arxiv by ArXiv ID.
|
||||
*
|
||||
* @param id id of the record in ArXiv
|
||||
* @return the first matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
|
||||
@Override
|
||||
public ImportRecord getRecord(String id) throws MetadataSourceException {
|
||||
List<ImportRecord> records = retry(new SearchByIdCallable(id));
|
||||
return records == null || records.isEmpty() ? null : records.get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a single record from the ArXiv matching the query.
|
||||
* Field "query" will be used to get data from.
|
||||
*
|
||||
* @see org.dspace.importer.external.datamodel.Query
|
||||
* @param query a query matching a single record
|
||||
* @return the first matching record
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public ImportRecord getRecord(Query query) throws MetadataSourceException {
|
||||
List<ImportRecord> records = retry(new SearchByIdCallable(query));
|
||||
return records == null || records.isEmpty() ? null : records.get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the class
|
||||
*
|
||||
* @throws Exception on generic exception
|
||||
*/
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
Client client = ClientBuilder.newClient();
|
||||
webTarget = client.target(baseAddress);
|
||||
}
|
||||
|
||||
/**
|
||||
* The string that identifies this import implementation. Preferable a URI
|
||||
*
|
||||
* @return the identifying uri
|
||||
*/
|
||||
@Override
|
||||
public String getImportSource() {
|
||||
return "arxiv";
|
||||
}
|
||||
|
||||
/**
|
||||
* Expect this method will be not used and erased from the interface soon
|
||||
*/
|
||||
@Override
|
||||
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
|
||||
// FIXME: we need this method?
|
||||
throw new MethodNotFoundException("This method is not implemented for ArXiv");
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds records based on query object.
|
||||
* Supports search by title and/or author
|
||||
*
|
||||
* @param query a query object to base the search on.
|
||||
* @return a collection of import records.
|
||||
* @throws MetadataSourceException if the underlying methods throw any exception.
|
||||
*/
|
||||
@Override
|
||||
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
|
||||
return retry(new FindMatchingRecordCallable(query));
|
||||
}
|
||||
|
||||
/**
|
||||
* This class is a Callable implementation to count the number of entries for an ArXiv
|
||||
* query.
|
||||
* This Callable use as query value to ArXiv the string queryString passed to constructor.
|
||||
* If the object will be construct through Query.class instance, the value of the Query's
|
||||
* map with the key "query" will be used.
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
private class CountByQueryCallable implements Callable<Integer> {
|
||||
private Query query;
|
||||
|
||||
|
||||
private CountByQueryCallable(String queryString) {
|
||||
query = new Query();
|
||||
query.addParameter("query", queryString);
|
||||
}
|
||||
|
||||
private CountByQueryCallable(Query query) {
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Integer call() throws Exception {
|
||||
String queryString = query.getParameterAsClass("query", String.class);
|
||||
Integer start = query.getParameterAsClass("start", Integer.class);
|
||||
Integer maxResult = query.getParameterAsClass("count", Integer.class);
|
||||
WebTarget local = webTarget.queryParam("search_query", queryString);
|
||||
if (maxResult != null) {
|
||||
local = local.queryParam("max_results", String.valueOf(maxResult));
|
||||
}
|
||||
if (start != null) {
|
||||
local = local.queryParam("start", String.valueOf(start));
|
||||
}
|
||||
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
|
||||
Response response = invocationBuilder.get();
|
||||
if (response.getStatus() == 200) {
|
||||
String responseString = response.readEntity(String.class);
|
||||
OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(responseString));
|
||||
OMElement element = records.getDocumentElement();
|
||||
AXIOMXPath xpath = null;
|
||||
try {
|
||||
xpath = new AXIOMXPath("opensearch:totalResults");
|
||||
xpath.addNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/");
|
||||
OMElement count = (OMElement) xpath.selectSingleNode(element);
|
||||
return Integer.parseInt(count.getText());
|
||||
} catch (JaxenException e) {
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This class is a Callable implementation to get ArXiv entries based on
|
||||
* query object.
|
||||
* This Callable use as query value the string queryString passed to constructor.
|
||||
* If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
|
||||
* Pagination is supported too, using the value of the Query's map with keys "start" and "count".
|
||||
*
|
||||
* @see org.dspace.importer.external.datamodel.Query
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
|
||||
private Query query;
|
||||
|
||||
|
||||
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
|
||||
query = new Query();
|
||||
query.addParameter("query", queryString);
|
||||
query.addParameter("start", start);
|
||||
query.addParameter("count", maxResult);
|
||||
}
|
||||
|
||||
private SearchByQueryCallable(Query query) {
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<ImportRecord> call() throws Exception {
|
||||
List<ImportRecord> results = new ArrayList<ImportRecord>();
|
||||
String queryString = query.getParameterAsClass("query", String.class);
|
||||
Integer start = query.getParameterAsClass("start", Integer.class);
|
||||
Integer maxResult = query.getParameterAsClass("count", Integer.class);
|
||||
WebTarget local = webTarget.queryParam("search_query", queryString);
|
||||
if (maxResult != null) {
|
||||
local = local.queryParam("max_results", String.valueOf(maxResult));
|
||||
}
|
||||
if (start != null) {
|
||||
local = local.queryParam("start", String.valueOf(start));
|
||||
}
|
||||
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
|
||||
Response response = invocationBuilder.get();
|
||||
if (response.getStatus() == 200) {
|
||||
String responseString = response.readEntity(String.class);
|
||||
List<OMElement> omElements = splitToRecords(responseString);
|
||||
for (OMElement record : omElements) {
|
||||
results.add(transformSourceRecords(record));
|
||||
}
|
||||
return results;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This class is a Callable implementation to get an ArXiv entry using ArXiv ID
|
||||
* The ID to use can be passed through the constructor as a String or as Query's map entry, with the key "id".
|
||||
*
|
||||
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
|
||||
*
|
||||
*/
|
||||
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
|
||||
private Query query;
|
||||
|
||||
private SearchByIdCallable(Query query) {
|
||||
this.query = query;
|
||||
}
|
||||
|
||||
private SearchByIdCallable(String id) {
|
||||
this.query = new Query();
|
||||
query.addParameter("id", id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ImportRecord> call() throws Exception {
|
||||
List<ImportRecord> results = new ArrayList<ImportRecord>();
|
||||
String arxivid = query.getParameterAsClass("id", String.class);
|
||||
if (StringUtils.isNotBlank(arxivid)) {
|
||||
arxivid = arxivid.trim();
|
||||
if (arxivid.startsWith("http://arxiv.org/abs/")) {
|
||||
arxivid = arxivid.substring("http://arxiv.org/abs/".length());
|
||||
} else if (arxivid.toLowerCase().startsWith("arxiv:")) {
|
||||
arxivid = arxivid.substring("arxiv:".length());
|
||||
}
|
||||
}
|
||||
WebTarget local = webTarget.queryParam("id_list", arxivid);
|
||||
Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
|
||||
Response response = invocationBuilder.get();
|
||||
if (response.getStatus() == 200) {
|
||||
String responseString = response.readEntity(String.class);
|
||||
List<OMElement> omElements = splitToRecords(responseString);
|
||||
for (OMElement record : omElements) {
|
||||
results.add(transformSourceRecords(record));
|
||||
}
|
||||
return results;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * This class is a Callable implementation to search ArXiv entries
     * using author and title.
     * There are two fields in the Query map to pass, with keys "title" and "author"
     * (at least one must be used).
     *
     * @see org.dspace.importer.external.datamodel.Query
     * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
     *
     */
    private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private FindMatchingRecordCallable(Query q) {
            query = q;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            String queryString = getQuery(this.query);
            List<ImportRecord> results = new ArrayList<ImportRecord>();
            WebTarget local = webTarget.queryParam("search_query", queryString);
            Invocation.Builder invocationBuilder = local.request(MediaType.TEXT_PLAIN_TYPE);
            Response response = invocationBuilder.get();
            if (response.getStatus() == 200) {
                String responseString = response.readEntity(String.class);
                List<OMElement> omElements = splitToRecords(responseString);
                for (OMElement record : omElements) {
                    results.add(transformSourceRecords(record));
                }
                return results;
            } else {
                return null;
            }
        }

        private String getQuery(Query query) {
            String title = query.getParameterAsClass("title", String.class);
            String author = query.getParameterAsClass("author", String.class);
            StringBuffer queryString = new StringBuffer();
            if (StringUtils.isNotBlank(title)) {
                queryString.append("ti:\"").append(title).append("\"");
            }
            if (StringUtils.isNotBlank(author)) {
                // [FAU]
                if (queryString.length() > 0) {
                    queryString.append(" AND ");
                }
                queryString.append("au:\"").append(author).append("\"");
            }
            return queryString.toString();
        }
    }

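    // Example (hypothetical values): a Query with title "Dark Matter" and author
    // "Jane Doe" makes getQuery(...) return the search_query string
    //   ti:"Dark Matter" AND au:"Jane Doe"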
    private List<OMElement> splitToRecords(String recordsSrc) {
        OMXMLParserWrapper records = OMXMLBuilderFactory.createOMBuilder(new StringReader(recordsSrc));
        OMElement element = records.getDocumentElement();
        AXIOMXPath xpath = null;
        try {
            xpath = new AXIOMXPath("ns:entry");
            xpath.addNamespace("ns", "http://www.w3.org/2005/Atom");
            List<OMElement> recordsList = xpath.selectNodes(element);
            return recordsList;
        } catch (JaxenException e) {
            return null;
        }
    }

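    // Example: for an arXiv Atom response such as
    //   <feed xmlns="http://www.w3.org/2005/Atom"><entry>...</entry><entry>...</entry></feed>
    // splitToRecords returns one OMElement per <entry>, each of which is then mapped
    // to an ImportRecord by transformSourceRecords in the callables above.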
    /**
     * Return the baseAddress set on this object
     *
     * @return The String object that represents the baseAddress of this object
     */
    public String getBaseAddress() {
        return baseAddress;
    }

    /**
     * Set the baseAddress on this object
     *
     * @param baseAddress The String object that represents the baseAddress of this object
     */
    public void setBaseAddress(String baseAddress) {
        this.baseAddress = baseAddress;
    }
}
@@ -71,7 +71,7 @@ public class Query {
            return null;
        } else {
            Object o = c.iterator().next();
            if (clazz.isAssignableFrom(o.getClass())) {
            if (o != null && clazz.isAssignableFrom(o.getClass())) {
                return (T) o;
            } else {
                return null;
@@ -21,6 +21,8 @@ import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Required;

/**
@@ -31,6 +33,8 @@ import org.springframework.beans.factory.annotation.Required;
public class SimpleXpathMetadatumContributor implements MetadataContributor<OMElement> {
    private MetadataFieldConfig field;

    private static final Logger log = LoggerFactory.getLogger(SimpleXpathMetadatumContributor.class);

    /**
     * Return prefixToNamespaceMapping
     *
@@ -157,12 +161,12 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<OMEl
                } else if (el instanceof OMText) {
                    values.add(metadataFieldMapping.toDCValue(field, ((OMText) el).getText()));
                } else {
                    System.err.println("node of type: " + el.getClass());
                    log.error("node of type: " + el.getClass());
                }
            }
            return values;
        } catch (JaxenException e) {
            System.err.println(query);
            log.error(query, e);
            throw new RuntimeException(e);
        }
@@ -16,7 +16,6 @@ import org.dspace.importer.external.metadatamapping.contributor.MetadataContribu
import org.dspace.importer.external.metadatamapping.transform.GenerateQueryService;
import org.dspace.importer.external.service.components.AbstractRemoteMetadataSource;
import org.dspace.importer.external.service.components.MetadataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Required;

/**
@@ -49,7 +48,6 @@ public abstract class AbstractImportMetadataSourceService<RecordType> extends Ab
     *
     * @param generateQueryForItem the query generator to be used.
     */
    @Autowired
    public void setGenerateQueryForItem(GenerateQueryService generateQueryForItem) {
        this.generateQueryForItem = generateQueryForItem;
    }
@@ -1,146 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.submit.lookup;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import gr.ekt.bte.core.DataLoadingSpec;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.RecordSet;
import gr.ekt.bte.core.Value;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bte.exceptions.MalformedSourceException;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ArXivFileDataLoader extends FileDataLoader {

    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(ArXivFileDataLoader.class);

    Map<String, String> fieldMap; // mapping between service fields and local
    // intermediate fields

    /**
     * Empty constructor
     */
    public ArXivFileDataLoader() {
    }

    /**
     * @param filename Name of file to load ArXiv data from.
     */
    public ArXivFileDataLoader(String filename) {
        super(filename);
    }

    /*
     * {@see gr.ekt.bte.core.DataLoader#getRecords()}
     *
     * @throws MalformedSourceException
     */
    @Override
    public RecordSet getRecords() throws MalformedSourceException {

        RecordSet recordSet = new RecordSet();

        try {
            InputStream inputStream = new FileInputStream(new File(filename));

            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setValidating(false);
            factory.setIgnoringComments(true);
            factory.setIgnoringElementContentWhitespace(true);

            DocumentBuilder db = factory.newDocumentBuilder();
            Document inDoc = db.parse(inputStream);

            Element xmlRoot = inDoc.getDocumentElement();
            List<Element> dataRoots = XMLUtils.getElementList(xmlRoot, "entry");

            for (Element dataRoot : dataRoots) {
                Record record = ArxivUtils.convertArxixDomToRecord(dataRoot);
                if (record != null) {
                    recordSet.addRecord(convertFields(record));
                }
            }
        } catch (FileNotFoundException e) {
            log.error(e.getMessage(), e);
        } catch (ParserConfigurationException e) {
            log.error(e.getMessage(), e);
        } catch (SAXException e) {
            log.error(e.getMessage(), e);
        } catch (IOException e) {
            log.error(e.getMessage(), e);
        }

        return recordSet;
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
     */
    @Override
    public RecordSet getRecords(DataLoadingSpec spec)
        throws MalformedSourceException {
        if (spec.getOffset() > 0) {
            return new RecordSet();
        }
        return getRecords();
    }

    public Record convertFields(Record publication) {
        for (String fieldName : fieldMap.keySet()) {
            String md = null;
            if (fieldMap != null) {
                md = this.fieldMap.get(fieldName);
            }

            if (StringUtils.isBlank(md)) {
                continue;
            } else {
                md = md.trim();
            }

            if (publication.isMutable()) {
                List<Value> values = publication.getValues(fieldName);
                publication.makeMutable().removeField(fieldName);
                publication.makeMutable().addField(md, values);
            }
        }

        return publication;
    }

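    // Example (hypothetical mapping): with fieldMap = {"title" -> "dc.title"},
    // convertFields moves the values stored under "title" to the field "dc.title";
    // fields with a blank or missing mapping are left untouched.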
    public void setFieldMap(Map<String, String> fieldMap) {
        this.fieldMap = fieldMap;
    }
}
@@ -1,84 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.Record;
import org.apache.http.HttpException;
import org.dspace.core.Context;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ArXivOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
    protected ArXivService arXivService = new ArXivService();

    protected boolean searchProvider = true;

    public void setArXivService(ArXivService arXivService) {
        this.arXivService = arXivService;
    }

    @Override
    public List<String> getSupportedIdentifiers() {
        return Arrays.asList(new String[] {ARXIV, DOI});
    }

    public void setSearchProvider(boolean searchProvider) {
        this.searchProvider = searchProvider;
    }

    @Override
    public boolean isSearchProvider() {
        return searchProvider;
    }

    @Override
    public List<Record> getByIdentifier(Context context,
                                        Map<String, Set<String>> keys) throws HttpException, IOException {
        List<Record> results = new ArrayList<Record>();
        if (keys != null) {
            Set<String> dois = keys.get(DOI);
            Set<String> arxivids = keys.get(ARXIV);
            List<Record> items = new ArrayList<Record>();
            if (dois != null && dois.size() > 0) {
                items.addAll(arXivService.getByDOIs(dois));
            }
            if (arxivids != null && arxivids.size() > 0) {
                for (String arxivid : arxivids) {
                    items.add(arXivService.getByArXivIDs(arxivid));
                }
            }

            for (Record item : items) {
                results.add(convertFields(item));
            }
        }
        return results;
    }

    @Override
    public List<Record> search(Context context, String title, String author,
                               int year) throws HttpException, IOException {
        List<Record> results = new ArrayList<Record>();
        List<Record> items = arXivService.searchByTerm(title, author, year);
        for (Record item : items) {
            results.add(convertFields(item));
        }
        return results;
    }
}
@@ -1,162 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import gr.ekt.bte.core.Record;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.HttpParams;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ArXivService {
    private int timeout = 1000;

    /**
     * How long to wait for a connection to be established.
     *
     * @param timeout milliseconds
     */
    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    public List<Record> getByDOIs(Set<String> dois) throws HttpException,
        IOException {
        if (dois != null && dois.size() > 0) {
            String doisQuery = StringUtils.join(dois.iterator(), " OR ");
            return search(doisQuery, null, 100);
        }
        return null;
    }

    public List<Record> searchByTerm(String title, String author, int year)
        throws HttpException, IOException {
        StringBuffer query = new StringBuffer();
        if (StringUtils.isNotBlank(title)) {
            query.append("ti:\"").append(title).append("\"");
        }
        if (StringUtils.isNotBlank(author)) {
            // [FAU]
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("au:\"").append(author).append("\"");
        }
        return search(query.toString(), "", 10);
    }

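    // Example (hypothetical values): searchByTerm("Dark Matter", "Jane Doe", 2020)
    // builds the query ti:"Dark Matter" AND au:"Jane Doe" (the year parameter is not
    // used by this service) and requests at most 10 results.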
    protected List<Record> search(String query, String arxivid, int max_result)
        throws IOException, HttpException {
        List<Record> results = new ArrayList<Record>();
        HttpGet method = null;
        try {
            HttpClient client = new DefaultHttpClient();
            HttpParams params = client.getParams();
            params.setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout);

            try {
                URIBuilder uriBuilder = new URIBuilder("http://export.arxiv.org/api/query");
                uriBuilder.addParameter("id_list", arxivid);
                uriBuilder.addParameter("search_query", query);
                uriBuilder.addParameter("max_results", String.valueOf(max_result));
                method = new HttpGet(uriBuilder.build());
            } catch (URISyntaxException ex) {
                throw new HttpException(ex.getMessage());
            }

            // Execute the method.
            HttpResponse response = client.execute(method);
            StatusLine responseStatus = response.getStatusLine();
            int statusCode = responseStatus.getStatusCode();

            if (statusCode != HttpStatus.SC_OK) {
                if (statusCode == HttpStatus.SC_BAD_REQUEST) {
                    throw new RuntimeException("arXiv query is not valid");
                } else {
                    throw new RuntimeException("Http call failed: " + responseStatus);
                }
            }

            try {
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                factory.setValidating(false);
                factory.setIgnoringComments(true);
                factory.setIgnoringElementContentWhitespace(true);
                // disallow DTD parsing to ensure no XXE attacks can occur.
                // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
                factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);

                DocumentBuilder db = factory.newDocumentBuilder();
                Document inDoc = db.parse(response.getEntity().getContent());

                Element xmlRoot = inDoc.getDocumentElement();
                List<Element> dataRoots = XMLUtils.getElementList(xmlRoot, "entry");

                for (Element dataRoot : dataRoots) {
                    Record crossitem = ArxivUtils.convertArxixDomToRecord(dataRoot);
                    if (crossitem != null) {
                        results.add(crossitem);
                    }
                }
            } catch (Exception e) {
                throw new RuntimeException("ArXiv identifier is not valid or does not exist");
            }
        } finally {
            if (method != null) {
                method.releaseConnection();
            }
        }

        return results;
    }

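    // Example (hypothetical values): search("", "2101.00001", 1) issues a GET to
    //   http://export.arxiv.org/api/query?id_list=2101.00001&search_query=&max_results=1
    // and converts each <entry> of the Atom response into a Record.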
    public Record getByArXivIDs(String raw) throws HttpException, IOException {
        if (StringUtils.isNotBlank(raw)) {
            raw = raw.trim();
            if (raw.startsWith("http://arxiv.org/abs/")) {
                raw = raw.substring("http://arxiv.org/abs/".length());
            } else if (raw.toLowerCase().startsWith("arxiv:")) {
                raw = raw.substring("arxiv:".length());
            }
            List<Record> result = search("", raw, 1);
            if (result != null && result.size() > 0) {
                return result.get(0);
            }
        }
        return null;
    }
}
@@ -1,151 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.util.LinkedList;
import java.util.List;

import gr.ekt.bte.core.MutableRecord;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.StringValue;
import gr.ekt.bte.core.Value;
import org.dspace.app.util.XMLUtils;
import org.dspace.submit.util.SubmissionLookupPublication;
import org.w3c.dom.Element;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class ArxivUtils {

    /**
     * Default constructor
     */
    private ArxivUtils() { }

    public static Record convertArxixDomToRecord(Element dataRoot) {
        MutableRecord record = new SubmissionLookupPublication("");

        String articleTitle = XMLUtils.getElementValue(dataRoot, "title");
        if (articleTitle != null) {
            record.addValue("title", new StringValue(articleTitle));
        }
        String summary = XMLUtils.getElementValue(dataRoot, "summary");
        if (summary != null) {
            record.addValue("summary", new StringValue(summary));
        }
        String year = XMLUtils.getElementValue(dataRoot, "published");
        if (year != null) {
            record.addValue("published", new StringValue(year));
        }
        String splashPageUrl = XMLUtils.getElementValue(dataRoot, "id");
        if (splashPageUrl != null) {
            record.addValue("id", new StringValue(splashPageUrl));
        }
        String comment = XMLUtils.getElementValue(dataRoot, "arxiv:comment");
        if (comment != null) {
            record.addValue("comment", new StringValue(comment));
        }

        List<Element> links = XMLUtils.getElementList(dataRoot, "link");
        if (links != null) {
            for (Element link : links) {
                if ("related".equals(link.getAttribute("rel"))
                    && "pdf".equals(link.getAttribute("title"))) {
                    String pdfUrl = link.getAttribute("href");
                    if (pdfUrl != null) {
                        record.addValue("pdfUrl", new StringValue(pdfUrl));
                    }
                }
            }
        }

        String doi = XMLUtils.getElementValue(dataRoot, "arxiv:doi");
        if (doi != null) {
            record.addValue("doi", new StringValue(doi));
        }
        String journalRef = XMLUtils.getElementValue(dataRoot, "arxiv:journal_ref");
        if (journalRef != null) {
            record.addValue("journalRef", new StringValue(journalRef));
        }

        List<String> primaryCategory = new LinkedList<String>();
        List<Element> primaryCategoryList = XMLUtils.getElementList(dataRoot, "arxiv:primary_category");
        if (primaryCategoryList != null) {
            for (Element primaryCategoryElement : primaryCategoryList) {
                primaryCategory.add(primaryCategoryElement.getAttribute("term"));
            }
        }

        if (primaryCategory.size() > 0) {
            List<Value> values = new LinkedList<Value>();
            for (String s : primaryCategory) {
                values.add(new StringValue(s));
            }
            record.addField("primaryCategory", values);
        }

        List<String> category = new LinkedList<String>();
        List<Element> categoryList = XMLUtils.getElementList(dataRoot, "category");
        if (categoryList != null) {
            for (Element categoryElement : categoryList) {
                category.add(categoryElement.getAttribute("term"));
            }
        }

        if (category.size() > 0) {
            List<Value> values = new LinkedList<Value>();
            for (String s : category) {
                values.add(new StringValue(s));
            }
            record.addField("category", values);
        }

        List<String> authors = new LinkedList<String>();
        List<String> authorsWithAffiliations = new LinkedList<String>();
        List<Element> authorList = XMLUtils.getElementList(dataRoot, "author");
        if (authorList != null) {
            for (Element authorElement : authorList) {
                String authorName = XMLUtils.getElementValue(authorElement, "name");
                String authorAffiliation = XMLUtils.getElementValue(authorElement, "arxiv:affiliation");

                authors.add(authorName);
                authorsWithAffiliations.add(authorName + ": " + authorAffiliation);
            }
        }

        if (authors.size() > 0) {
            List<Value> values = new LinkedList<Value>();
            for (String sArray : authors) {
                values.add(new StringValue(sArray));
            }
            record.addField("author", values);
        }

        if (authorsWithAffiliations.size() > 0) {
            List<Value> values = new LinkedList<Value>();
            for (String sArray : authorsWithAffiliations) {
                values.add(new StringValue(sArray));
            }
            record.addField("authorWithAffiliation", values);
        }

        return record;
    }
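    // Example (abridged, hypothetical entry): an Atom <entry> with
    //   <title>Dark Matter</title><arxiv:doi>10.1000/xyz</arxiv:doi>
    // yields a Record whose "title" value is "Dark Matter" and whose "doi" value is
    // "10.1000/xyz"; repeatable elements such as <category> become multi-valued fields.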

}
@@ -1,148 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */

package org.dspace.submit.lookup;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import gr.ekt.bte.core.DataLoadingSpec;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.RecordSet;
import gr.ekt.bte.core.Value;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bte.exceptions.MalformedSourceException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class PubmedFileDataLoader extends FileDataLoader {

    Map<String, String> fieldMap; // mapping between service fields and local
    // intermediate fields

    /**
     * Empty constructor
     */
    public PubmedFileDataLoader() {
    }

    /**
     * @param filename Name of file to load Pubmed data from.
     */
    public PubmedFileDataLoader(String filename) {
        super(filename);
    }

    /*
     * {@see gr.ekt.bte.core.DataLoader#getRecords()}
     *
     * @throws MalformedSourceException
     */
    @Override
    public RecordSet getRecords() throws MalformedSourceException {

        RecordSet recordSet = new RecordSet();

        try {
            InputStream inputStream = new FileInputStream(new File(filename));

            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setValidating(false);
            factory.setIgnoringComments(true);
            factory.setIgnoringElementContentWhitespace(true);

            DocumentBuilder builder = factory.newDocumentBuilder();
            Document inDoc = builder.parse(inputStream);

            Element xmlRoot = inDoc.getDocumentElement();
            List<Element> pubArticles = XMLUtils.getElementList(xmlRoot, "PubmedArticle");

            for (Element xmlArticle : pubArticles) {
                Record record = null;
                try {
                    record = PubmedUtils.convertPubmedDomToRecord(xmlArticle);
                    recordSet.addRecord(convertFields(record));
                } catch (Exception e) {
                    throw new RuntimeException(e.getMessage(), e);
                }
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
        } catch (SAXException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }

        return recordSet;

    }

    /*
     * (non-Javadoc)
     *
     * @see
     * gr.ekt.bte.core.DataLoader#getRecords(gr.ekt.bte.core.DataLoadingSpec)
     */
    @Override
    public RecordSet getRecords(DataLoadingSpec spec)
        throws MalformedSourceException {
        if (spec.getOffset() > 0) {
            return new RecordSet();
        }
        return getRecords();
    }

    public Record convertFields(Record publication) {
        for (String fieldName : fieldMap.keySet()) {
            String md = null;
            if (fieldMap != null) {
                md = this.fieldMap.get(fieldName);
            }

            if (StringUtils.isBlank(md)) {
                continue;
            } else {
                md = md.trim();
            }

            if (publication.isMutable()) {
                List<Value> values = publication.getValues(fieldName);
                publication.makeMutable().removeField(fieldName);
                publication.makeMutable().addField(md, values);
            }
        }

        return publication;
    }

    public void setFieldMap(Map<String, String> fieldMap) {
        this.fieldMap = fieldMap;
    }
}
@@ -1,116 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

import gr.ekt.bte.core.Record;
import org.apache.http.HttpException;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
import org.dspace.core.LogManager;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class PubmedOnlineDataLoader extends NetworkSubmissionLookupDataLoader {
    protected boolean searchProvider = true;

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedOnlineDataLoader.class);

    protected PubmedService pubmedService = new PubmedService();

    public void setPubmedService(PubmedService pubmedService) {
        this.pubmedService = pubmedService;
    }

    @Override
    public List<String> getSupportedIdentifiers() {
        return Arrays.asList(new String[] {PUBMED, DOI});
    }

    public void setSearchProvider(boolean searchProvider) {
        this.searchProvider = searchProvider;
    }

    @Override
    public boolean isSearchProvider() {
        return searchProvider;
    }

    @Override
    public List<Record> getByIdentifier(Context context,
                                        Map<String, Set<String>> keys) throws HttpException, IOException {
        Set<String> pmids = keys != null ? keys.get(PUBMED) : null;
        Set<String> dois = keys != null ? keys.get(DOI) : null;
        List<Record> results = new ArrayList<Record>();
        if (pmids != null && pmids.size() > 0
            && (dois == null || dois.size() == 0)) {
            for (String pmid : pmids) {
                Record p = null;
                try {
                    p = pubmedService.getByPubmedID(pmid);
                } catch (Exception e) {
                    log.error(LogManager.getHeader(context, "getByIdentifier",
                                                   "pmid=" + pmid), e);
                }
                if (p != null) {
                    results.add(convertFields(p));
                }
            }
        } else if (dois != null && dois.size() > 0
            && (pmids == null || pmids.size() == 0)) {
            StringBuffer query = new StringBuffer();
            for (String d : dois) {
                if (query.length() > 0) {
                    query.append(" OR ");
                }
                query.append(d).append("[AI]");
            }

            List<Record> pubmedResults = pubmedService.search(query.toString());
            for (Record p : pubmedResults) {
                results.add(convertFields(p));
            }
        } else if (dois != null && dois.size() > 0 && pmids != null
            && pmids.size() > 0) {
            // EKT:ToDo: support list of dois and pmids in the search method of
            // pubmedService
            List<Record> pubmedResults = pubmedService.search(dois.iterator().next(),
                                                              pmids.iterator().next());
            if (pubmedResults != null) {
                for (Record p : pubmedResults) {
                    results.add(convertFields(p));
                }
            }
        }

        return results;
    }

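    // Example (hypothetical DOIs): for keys containing only the DOIs 10.1000/a and
    // 10.1000/b, the branch above builds the PubMed term
    //   10.1000/a[AI] OR 10.1000/b[AI]
    // and delegates to pubmedService.search(String).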
    @Override
    public List<Record> search(Context context, String title, String author,
                               int year) throws HttpException, IOException {
        List<Record> pubmedResults = pubmedService.search(title, author, year);
        List<Record> results = new ArrayList<Record>();
        if (pubmedResults != null) {
            for (Record p : pubmedResults) {
                results.add(convertFields(p));
            }
        }
        return results;
    }
}
@@ -1,274 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import gr.ekt.bte.core.Record;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.logging.log4j.Logger;
import org.dspace.app.util.XMLUtils;
import org.dspace.core.ConfigurationManager;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class PubmedService {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(PubmedService.class);

    protected int timeout = 1000;

    public void setTimeout(int timeout) {
        this.timeout = timeout;
    }

    public Record getByPubmedID(String pubmedid) throws HttpException,
        IOException, ParserConfigurationException, SAXException {
        List<String> ids = new ArrayList<String>();
        ids.add(pubmedid.trim());
        List<Record> items = getByPubmedIDs(ids);
        if (items != null && items.size() > 0) {
            return items.get(0);
        }
        return null;
    }

    public List<Record> search(String title, String author, int year)
        throws HttpException, IOException {
        StringBuffer query = new StringBuffer();
        if (StringUtils.isNotBlank(title)) {
            query.append("((").append(title).append("[TI]) OR (");
            // [TI] does not always work, book chapter title
            query.append("(").append(title).append("[book]))");
        }
        if (StringUtils.isNotBlank(author)) {
            // [FAU]
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("(").append(author).append("[AU])");
        }
        if (year != -1) {
            // [DP]
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append(year).append("[DP]");
        }
        return search(query.toString());
    }

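    // Example (hypothetical values): search("Gene Therapy", "Jane Doe", 2019) builds
    //   ((Gene Therapy[TI]) OR ((Gene Therapy[book])) AND (Jane Doe[AU]) AND 2019[DP]
    // (parentheses exactly as produced by the appends above) and forwards it to
    // search(String query) below.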
    public List<Record> search(String query) throws IOException, HttpException {
        List<Record> results = new ArrayList<>();
        if (!ConfigurationManager.getBooleanProperty(SubmissionLookupService.CFG_MODULE, "remoteservice.demo")) {
            HttpGet method = null;
            try {
                HttpClient client = new DefaultHttpClient();
                client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, timeout);

                URIBuilder uriBuilder = new URIBuilder(
                    "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi");
                uriBuilder.addParameter("db", "pubmed");
                uriBuilder.addParameter("datetype", "edat");
                uriBuilder.addParameter("retmax", "10");
                uriBuilder.addParameter("term", query);
                method = new HttpGet(uriBuilder.build());

                // Execute the method.
                HttpResponse response = client.execute(method);
                StatusLine statusLine = response.getStatusLine();
                int statusCode = statusLine.getStatusCode();

                if (statusCode != HttpStatus.SC_OK) {
                    throw new RuntimeException("WS call failed: " + statusLine);
                }

                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                factory.setValidating(false);
                factory.setIgnoringComments(true);
                factory.setIgnoringElementContentWhitespace(true);
                // disallow DTD parsing to ensure no XXE attacks can occur.
                // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
                factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);

                DocumentBuilder builder;
                try {
                    builder = factory.newDocumentBuilder();

                    Document inDoc = builder.parse(response.getEntity().getContent());

                    Element xmlRoot = inDoc.getDocumentElement();
                    Element idList = XMLUtils.getSingleElement(xmlRoot, "IdList");
                    List<String> pubmedIDs = XMLUtils.getElementValueList(idList, "Id");
                    results = getByPubmedIDs(pubmedIDs);
                } catch (ParserConfigurationException e1) {
                    log.error(e1.getMessage(), e1);
                } catch (SAXException e1) {
                    log.error(e1.getMessage(), e1);
                }
            } catch (Exception e1) {
                log.error(e1.getMessage(), e1);
            } finally {
                if (method != null) {
                    method.releaseConnection();
                }
            }
        } else {
            InputStream stream = null;
            try {
                File file = new File(
                    ConfigurationManager.getProperty("dspace.dir")
                        + "/config/crosswalks/demo/pubmed-search.xml");
                stream = new FileInputStream(file);
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                factory.setValidating(false);
                factory.setIgnoringComments(true);
                factory.setIgnoringElementContentWhitespace(true);
                // disallow DTD parsing to ensure no XXE attacks can occur.
                // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
                factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);

                DocumentBuilder builder = factory.newDocumentBuilder();
                Document inDoc = builder.parse(stream);

                Element xmlRoot = inDoc.getDocumentElement();
                Element idList = XMLUtils.getSingleElement(xmlRoot, "IdList");
                List<String> pubmedIDs = XMLUtils.getElementValueList(idList, "Id");
                results = getByPubmedIDs(pubmedIDs);
            } catch (Exception e) {
                throw new RuntimeException(e.getMessage(), e);
            } finally {
                if (stream != null) {
                    try {
                        stream.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return results;
    }

    public List<Record> getByPubmedIDs(List<String> pubmedIDs)
        throws HttpException, IOException, ParserConfigurationException,
        SAXException {
        List<Record> results = new ArrayList<Record>();
        HttpGet method = null;
        try {
            HttpClient client = new DefaultHttpClient();
            client.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5 * timeout);

            try {
                URIBuilder uriBuilder = new URIBuilder(
                    "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi");
                uriBuilder.addParameter("db", "pubmed");
                uriBuilder.addParameter("retmode", "xml");
                uriBuilder.addParameter("rettype", "full");
                uriBuilder.addParameter("id", StringUtils.join(pubmedIDs.iterator(), ","));
                method = new HttpGet(uriBuilder.build());
            } catch (URISyntaxException ex) {
                throw new RuntimeException("Request not sent", ex);
            }

            // Execute the method.
            HttpResponse response = client.execute(method);
            StatusLine statusLine = response.getStatusLine();
            int statusCode = statusLine.getStatusCode();

            if (statusCode != HttpStatus.SC_OK) {
                throw new RuntimeException("WS call failed: " + statusLine);
            }

            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setValidating(false);
            factory.setIgnoringComments(true);
            factory.setIgnoringElementContentWhitespace(true);
            // disallow DTD parsing to ensure no XXE attacks can occur.
            // See https://cheatsheetseries.owasp.org/cheatsheets/XML_External_Entity_Prevention_Cheat_Sheet.html
            factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);

            DocumentBuilder builder = factory.newDocumentBuilder();
            Document inDoc = builder.parse(response.getEntity().getContent());

            Element xmlRoot = inDoc.getDocumentElement();
            List<Element> pubArticles = XMLUtils.getElementList(xmlRoot, "PubmedArticle");

            for (Element xmlArticle : pubArticles) {
                Record pubmedItem = null;
                try {
                    pubmedItem = PubmedUtils.convertPubmedDomToRecord(xmlArticle);
                    results.add(pubmedItem);
                } catch (Exception e) {
                    throw new RuntimeException(
                        "PubmedID is not valid or does not exist: " + e.getMessage(), e);
                }
            }

            return results;
        } finally {
            if (method != null) {
                method.releaseConnection();
            }
        }
    }

    public List<Record> search(String doi, String pmid) throws HttpException,
        IOException {
        StringBuffer query = new StringBuffer();
        if (StringUtils.isNotBlank(doi)) {
            query.append(doi);
            query.append("[AID]");
        }
        if (StringUtils.isNotBlank(pmid)) {
            // [PMID]
            if (query.length() > 0) {
                query.append(" OR ");
            }
            query.append(pmid).append("[PMID]");
        }
        return search(query.toString());
    }
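    // Example (hypothetical identifiers): search("10.1000/xyz", "12345678") builds
    //   10.1000/xyz[AID] OR 12345678[PMID]
    // so a match on either identifier is returned.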
}
@@ -1,316 +0,0 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.submit.lookup;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import gr.ekt.bte.core.MutableRecord;
import gr.ekt.bte.core.Record;
import gr.ekt.bte.core.StringValue;
import gr.ekt.bte.core.Value;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.util.XMLUtils;
import org.dspace.submit.util.SubmissionLookupPublication;
import org.w3c.dom.Element;

/**
 * @author Andrea Bollini
 * @author Kostas Stamatis
 * @author Luigi Andrea Pascarelli
 * @author Panagiotis Koutsourakis
 */
public class PubmedUtils {

    /**
     * Default constructor
     */
    private PubmedUtils() { }

    public static Record convertPubmedDomToRecord(Element pubArticle) {
        MutableRecord record = new SubmissionLookupPublication("");

        Map<String, String> monthToNum = new HashMap<String, String>();
        monthToNum.put("Jan", "01");
        monthToNum.put("Feb", "02");
        monthToNum.put("Mar", "03");
        monthToNum.put("Apr", "04");
        monthToNum.put("May", "05");
        monthToNum.put("Jun", "06");
        monthToNum.put("Jul", "07");
        monthToNum.put("Aug", "08");
        monthToNum.put("Sep", "09");
        monthToNum.put("Oct", "10");
        monthToNum.put("Nov", "11");
        monthToNum.put("Dec", "12");

        Element medline = XMLUtils.getSingleElement(pubArticle, "MedlineCitation");

        Element article = XMLUtils.getSingleElement(medline, "Article");
        Element pubmed = XMLUtils.getSingleElement(pubArticle, "PubmedData");

        Element identifierList = XMLUtils.getSingleElement(pubmed, "ArticleIdList");
        if (identifierList != null) {
            List<Element> identifiers = XMLUtils.getElementList(identifierList, "ArticleId");
            if (identifiers != null) {
                for (Element id : identifiers) {
                    if ("pubmed".equals(id.getAttribute("IdType"))) {
                        String pubmedID = id.getTextContent().trim();
                        if (pubmedID != null) {
                            record.addValue("pubmedID", new StringValue(pubmedID));
                        }
                    } else if ("doi".equals(id.getAttribute("IdType"))) {
                        String doi = id.getTextContent().trim();
                        if (doi != null) {
                            record.addValue("doi", new StringValue(doi));
                        }
                    }
                }
            }
        }

        String status = XMLUtils.getElementValue(pubmed, "PublicationStatus");
        if (status != null) {
            record.addValue("publicationStatus", new StringValue(status));
        }

        String publicationModel = XMLUtils.getElementAttribute(medline, "Article", "PubModel");
        if (publicationModel != null) {
            record.addValue("pubModel", new StringValue(publicationModel));
        }

        String title = XMLUtils.getElementValue(article, "ArticleTitle");
        if (title != null) {
            record.addValue("articleTitle", new StringValue(title));
        }

        Element abstractElement = XMLUtils.getSingleElement(article, "Abstract");
        if (abstractElement == null) {
            abstractElement = XMLUtils.getSingleElement(medline, "OtherAbstract");
        }
        if (abstractElement != null) {
            String summary = XMLUtils.getElementValue(abstractElement, "AbstractText");
            if (summary != null) {
                record.addValue("abstractText", new StringValue(summary));
            }
        }

        List<String[]> authors = new LinkedList<String[]>();
        Element authorList = XMLUtils.getSingleElement(article, "AuthorList");
        if (authorList != null) {
            List<Element> authorsElement = XMLUtils.getElementList(authorList, "Author");
            if (authorsElement != null) {
                for (Element author : authorsElement) {
                    if (StringUtils.isBlank(XMLUtils.getElementValue(author, "CollectiveName"))) {
                        authors.add(new String[] {
                            XMLUtils.getElementValue(author, "ForeName"),
                            XMLUtils.getElementValue(author, "LastName")});
                    }
                }
            }
        }
        if (authors.size() > 0) {
            List<Value> values = new LinkedList<Value>();
            for (String[] sArray : authors) {
                values.add(new StringValue(sArray[1] + ", " + sArray[0]));
            }
            record.addField("author", values);
        }

        Element journal = XMLUtils.getSingleElement(article, "Journal");
        if (journal != null) {
            List<Element> jnumbers = XMLUtils.getElementList(journal, "ISSN");
            if (jnumbers != null) {
                for (Element jnumber : jnumbers) {
                    if ("Print".equals(jnumber.getAttribute("IssnType"))) {
                        String issn = jnumber.getTextContent().trim();
                        if (issn != null) {
                            record.addValue("printISSN", new StringValue(issn));
                        }
                    } else {
                        String eissn = jnumber.getTextContent().trim();
                        if (eissn != null) {
                            record.addValue("electronicISSN", new StringValue(eissn));
                        }
                    }
                }
            }

            String journalTitle = XMLUtils.getElementValue(journal, "Title");
            if (journalTitle != null) {
                record.addValue("journalTitle", new StringValue(journalTitle));
            }

            Element journalIssueElement = XMLUtils.getSingleElement(journal, "JournalIssue");
            if (journalIssueElement != null) {
                String volume = XMLUtils.getElementValue(journalIssueElement, "Volume");
                if (volume != null) {
                    record.addValue("journalVolume", new StringValue(volume));
                }

                String issue = XMLUtils.getElementValue(journalIssueElement, "Issue");
                if (issue != null) {
                    record.addValue("journalIssue", new StringValue(issue));
                }

                Element pubDateElement = XMLUtils.getSingleElement(journalIssueElement, "PubDate");

                String pubDate = null;
                if (pubDateElement != null) {
                    pubDate = XMLUtils.getElementValue(pubDateElement, "Year");

                    String month = XMLUtils.getElementValue(pubDateElement, "Month");
                    String day = XMLUtils.getElementValue(pubDateElement, "Day");
                    if (StringUtils.isNotBlank(month)
                        && monthToNum.containsKey(month)) {
                        pubDate += "-" + monthToNum.get(month);
                        if (StringUtils.isNotBlank(day)) {
                            pubDate += "-" + (day.length() == 1 ? "0" + day : day);
                        }
                    }
                }
                if (pubDate == null) {
                    pubDate = XMLUtils.getElementValue(pubDateElement, "MedlineDate");
                }
                if (pubDate != null) {
                    record.addValue("pubDate", new StringValue(pubDate));
                }
            }
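                // Example (hypothetical date): a PubDate of Year=2019, Month="Jun",
                // Day="5" is normalized by the mapping above to "2019-06-05";
                // month names outside monthToNum fall back to the year alone.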

            String language = XMLUtils.getElementValue(article, "Language");
            if (language != null) {
                record.addValue("language", new StringValue(language));
            }

            List<String> type = new LinkedList<String>();
            Element publicationTypeList = XMLUtils.getSingleElement(article, "PublicationTypeList");
            if (publicationTypeList != null) {
                List<Element> publicationTypes = XMLUtils.getElementList(publicationTypeList, "PublicationType");
                for (Element publicationType : publicationTypes) {
                    type.add(publicationType.getTextContent().trim());
                }
            }
            if (type.size() > 0) {
                List<Value> values = new LinkedList<Value>();
                for (String s : type) {
                    values.add(new StringValue(s));
                }
                record.addField("publicationType", values);
            }

            List<String> primaryKeywords = new LinkedList<String>();
            List<String> secondaryKeywords = new LinkedList<String>();
            Element keywordsList = XMLUtils.getSingleElement(medline, "KeywordList");
            if (keywordsList != null) {
                List<Element> keywords = XMLUtils.getElementList(keywordsList, "Keyword");
                for (Element keyword : keywords) {
                    if ("Y".equals(keyword.getAttribute("MajorTopicYN"))) {
                        primaryKeywords.add(keyword.getTextContent().trim());
                    } else {
                        secondaryKeywords.add(keyword.getTextContent().trim());
                    }
                }
            }
            if (primaryKeywords.size() > 0) {
                List<Value> values = new LinkedList<Value>();
                for (String s : primaryKeywords) {
                    values.add(new StringValue(s));
                }
                record.addField("primaryKeyword", values);
            }
            if (secondaryKeywords.size() > 0) {
                List<Value> values = new LinkedList<Value>();
                for (String s : secondaryKeywords) {
                    values.add(new StringValue(s));
                }
                record.addField("secondaryKeyword", values);
            }

            List<String> primaryMeshHeadings = new LinkedList<String>();
            List<String> secondaryMeshHeadings = new LinkedList<String>();
            Element meshHeadingsList = XMLUtils.getSingleElement(medline, "MeshHeadingList");
            if (meshHeadingsList != null) {
                List<Element> meshHeadings = XMLUtils.getElementList(meshHeadingsList, "MeshHeading");
                for (Element meshHeading : meshHeadings) {
                    if ("Y".equals(XMLUtils.getElementAttribute(meshHeading,
                                                                "DescriptorName", "MajorTopicYN"))) {
                        primaryMeshHeadings.add(XMLUtils.getElementValue(meshHeading, "DescriptorName"));
                    } else {
                        secondaryMeshHeadings.add(XMLUtils.getElementValue(meshHeading, "DescriptorName"));
                    }
                }
            }
            if (primaryMeshHeadings.size() > 0) {
                List<Value> values = new LinkedList<Value>();
                for (String s : primaryMeshHeadings) {
                    values.add(new StringValue(s));
                }
                record.addField("primaryMeshHeading", values);
            }
            if (secondaryMeshHeadings.size() > 0) {
                List<Value> values = new LinkedList<Value>();
                for (String s : secondaryMeshHeadings) {
                    values.add(new StringValue(s));
                }
                record.addField("secondaryMeshHeading", values);
            }

            Element paginationElement = XMLUtils.getSingleElement(article, "Pagination");
            if (paginationElement != null) {
                String startPage = XMLUtils.getElementValue(paginationElement, "StartPage");
                String endPage = XMLUtils.getElementValue(paginationElement, "EndPage");
                if (StringUtils.isBlank(startPage)) {
                    startPage = XMLUtils.getElementValue(paginationElement, "MedlinePgn");
                }

                if (startPage != null) {
                    record.addValue("startPage", new StringValue(startPage));
                }
                if (endPage != null) {
                    record.addValue("endPage", new StringValue(endPage));
                }
            }
        }

        return record;
    }
}
@@ -97,7 +97,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory {
    }

    @Override
    public List<Collection> getCollectionHandlesMappedToWorklow(Context context, String workflowName) {
    public List<Collection> getCollectionHandlesMappedToWorkflow(Context context, String workflowName) {
        List<Collection> collectionsMapped = new ArrayList<>();
        for (String handle : this.workflowMapping.keySet()) {
            if (this.workflowMapping.get(handle).getID().equals(workflowName)) {
@@ -107,7 +107,7 @@ public class XmlWorkflowFactoryImpl implements XmlWorkflowFactory {
                collectionsMapped.add(collection);
            }
        } catch (SQLException e) {
            log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorklow trying to " +
            log.error("SQLException in XmlWorkflowFactoryImpl.getCollectionHandlesMappedToWorkflow trying to " +
                      "retrieve collection with handle: " + handle, e);
        }
    }
@@ -86,7 +86,7 @@ public interface XmlWorkflowFactory {
     * @param workflowName Name of the workflow we want the collections of that are mapped to it
     * @return List of collections mapped to the requested workflow
     */
    public List<Collection> getCollectionHandlesMappedToWorklow(Context context, String workflowName);
    public List<Collection> getCollectionHandlesMappedToWorkflow(Context context, String workflowName);

    /**
     * Returns list of collections that are not mapped to any configured workflow, and thus use the default workflow
@@ -81,7 +81,7 @@ public class Step implements BeanNameAware {
    /**
     * Get the next step based on the outcome
     * @param outcome the outcome of the previous step
     * @return the next stepp or NULL if there is no step configured for this outcome
     * @return the next step or NULL if there is no step configured for this outcome
     */
    public Step getNextStep(int outcome) {
        return outcomes.get(outcome);
@@ -19,8 +19,14 @@

    <context:annotation-config/> <!-- allows us to use spring annotations in beans -->

    <bean id="importService" class="org.dspace.importer.external.service.ImportService"
          lazy-init="false" autowire="byType" destroy-method="destroy">
    <!-- If multiple importServices have been configured here but only one is to be used during the lookup step (StartSubmissionLookupStep),
        this can be accomplished by setting the property "publication-lookup.url" to the baseAddress of the required importService.
        So for example
        publication-lookup.url=https://eutils.ncbi.nlm.nih.gov/entrez/eutils/
        will result in using the PubmedImportService for the lookup step.
        Omitting this property will default to searching over all configured ImportService implementations.
    -->
    <bean id="importService" class="org.dspace.importer.external.service.ImportService" lazy-init="false" autowire="byType" destroy-method="destroy">
        <property name="importSources">
            <list>
                <ref bean="pubmedImportService" />
@@ -32,19 +38,22 @@
            </list>
        </property>
    </bean>

    <bean id="ArXivImportService"
          class="org.dspace.importer.external.arxiv.service.ArXivImportMetadataSourceServiceImpl" scope="singleton">
        <property name="metadataFieldMapping" ref="ArXivMetadataFieldMapping"/>
        <property name="baseAddress" value="http://export.arxiv.org/api/query"/>
    </bean>
    <bean id="ArXivMetadataFieldMapping"
          class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping">
    </bean>
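    <!-- Example (illustrative, not part of the original configuration): to restrict the
         lookup step to arXiv only, publication-lookup.url could be set to the ArXiv
         baseAddress above, i.e. publication-lookup.url=http://export.arxiv.org/api/query -->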
|
||||
<!--If multiple importServices have been configured here but only one is to be used during the lookup step (StartSubmissionLookupStep),
|
||||
this can be accomplished by specifying the property "publication-lookup.url" to the baseAddress of the required importService
|
||||
So for example
|
||||
publication-lookup.url=https://eutils.ncbi.nlm.nih.gov/entrez/eutils/
|
||||
Will result in using the PubmedImportService for the lookup step
|
||||
Omitting this property will default to searching over all configured ImportService implementations
|
||||
-->
|
||||
|
||||
<bean id="pubmedImportService"
|
||||
class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl">
|
||||
<property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
|
||||
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/>
|
||||
<property name="generateQueryForItem" ref="pubmedService"></property>
|
||||
<property name="supportedExtensions">
|
||||
<list>
|
||||
<value>xml</value>
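
A note on the lookup configuration above: the comment only illustrates the PubMed case. By the same mechanism, any other configured importer could presumably be selected by pointing the property at that importer's baseAddress; for instance (a hedged sketch, reusing the arXiv address configured above):

publication-lookup.url=http://export.arxiv.org/api/query

would route the lookup step to the ArXivImportService instead.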

@@ -14,12 +14,12 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataImportCLI"/>
</bean>

<bean id="metadata-export" class="org.dspace.app.bulkedit.MetadataExportScriptConfiguration">
<bean id="metadata-export" class="org.dspace.app.bulkedit.MetadataExportCliScriptConfiguration">
<property name="description" value="Export metadata for batch editing"/>
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/>
</bean>

<bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
<bean id="curate" class="org.dspace.curate.CurationCliScriptConfiguration">
<property name="description" value="Curation tasks"/>
<property name="dspaceRunnableClass" value="org.dspace.curate.CurationCli"/>
</bean>

@@ -13,6 +13,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
@@ -23,12 +24,22 @@ import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ScriptService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class MetadataExportIT
extends AbstractIntegrationTestWithDatabase {

@Rule
public ExpectedException thrown = ExpectedException.none();

private final ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();

@@ -59,4 +70,34 @@ public class MetadataExportIT
assertTrue(fileContent.contains("Donald, Smith"));
assertTrue(fileContent.contains(String.valueOf(item.getID())));
}

@Test(expected = ParseException.class)
public void metadataExportWithoutFileParameter()
throws IllegalAccessException, InstantiationException, ParseException {
context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context)
.build();
Collection collection = CollectionBuilder.createCollection(context, community)
.build();
Item item = ItemBuilder.createItem(context, collection)
.withAuthor("Donald, Smith")
.build();
context.restoreAuthSystemState();

String[] args = new String[] {"metadata-export",
"-i", String.valueOf(item.getHandle())};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();

ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);

DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null);
script.run();
}
}
}
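
The launch sequence in the test above (resolve the ScriptConfiguration by the script name, build the DSpaceRunnable, initialize it with a handler, then run) recurs verbatim in several of the tests below. A minimal helper along these lines could factor it out; this is only an illustrative sketch, not part of the patch, and the helper name is hypothetical:

// Hypothetical helper mirroring the launch sequence used throughout these tests.
private void launchScript(String[] args, TestDSpaceRunnableHandler handler) throws Exception {
    ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
    // args[0] is the script name, e.g. "metadata-export"
    ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);
    DSpaceRunnable script = null;
    if (scriptConfiguration != null) {
        script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
    }
    if (script != null) {
        // Option parsing happens during initialize/run, so a missing required
        // option (like the export file parameter above) surfaces as the expected ParseException.
        script.initialize(args, handler, null);
        script.run();
    }
}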

@@ -7,10 +7,12 @@
*/
package org.dspace.app.bulkedit;

import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertTrue;

import java.io.File;

import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.AbstractIntegrationTest;
import org.dspace.app.launcher.ScriptLauncher;
@@ -22,16 +24,25 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ScriptService;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

public class MetadataImportTest extends AbstractIntegrationTest {

private final ItemService itemService
= ContentServiceFactory.getInstance().getItemService();
= ContentServiceFactory.getInstance().getItemService();
private final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();
= ContentServiceFactory.getInstance().getCollectionService();
private final CommunityService communityService
= ContentServiceFactory.getInstance().getCommunityService();
= ContentServiceFactory.getInstance().getCommunityService();

@Rule
public ExpectedException thrown = ExpectedException.none();

@Test
public void metadataImportTest() throws Exception {
@@ -50,6 +61,7 @@ public class MetadataImportTest extends AbstractIntegrationTest {
StringUtils.equals(
itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY).get(0).getValue(),
"Donald, SmithImported"));
assertEquals(importedItem.getSubmitter(), eperson);

context.turnOffAuthorisationSystem();
itemService.delete(context, itemService.find(context, importedItem.getID()));
@@ -57,4 +69,24 @@ public class MetadataImportTest extends AbstractIntegrationTest {
communityService.delete(context, communityService.find(context, community.getID()));
context.restoreAuthSystemState();
}

@Test(expected = ParseException.class)
public void metadataImportWithoutEPersonParameterTest()
throws IllegalAccessException, InstantiationException, ParseException {
String fileLocation = new File(testProps.get("test.importcsv").toString()).getAbsolutePath();
String[] args = new String[] {"metadata-import", "-f", fileLocation, "-s"};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();

ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);

DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null);
script.run();
}
}
}

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.csv;
package org.dspace.app.csv;

import static junit.framework.TestCase.assertEquals;

@@ -19,13 +19,18 @@ import java.util.Iterator;
import java.util.List;
import java.util.UUID;

import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.bulkedit.MetadataImportException;
import org.dspace.app.bulkedit.MetadataImportInvalidHeadingException;
import org.dspace.app.rest.test.AbstractEntityIntegrationTest;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
@@ -35,35 +40,57 @@ import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataValueService;
import org.dspace.content.service.RelationshipService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ScriptService;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

/**
* Created by: Andrew Wood
* Date: 26 Jul 2019
*/
public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest {
public class CSVMetadataImportReferenceIT extends AbstractIntegrationTestWithDatabase {

//Common collection to utilize for test
private Collection col1;

@Autowired
private RelationshipService relationshipService;
private RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
private ItemService itemService = ContentServiceFactory.getInstance().getItemService();

@Autowired
private ItemService itemService;

Community parentCommunity;

/**
* Setup testing environment
*/
@Before
public void setup() {
public void setup() throws SQLException {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();

col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build();

context.turnOffAuthorisationSystem();

EntityType publication = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication").build();
EntityType person = EntityTypeBuilder.createEntityTypeBuilder(context, "Person").build();
EntityType project = EntityTypeBuilder.createEntityTypeBuilder(context, "Project").build();
EntityType orgUnit = EntityTypeBuilder.createEntityTypeBuilder(context, "OrgUnit").build();

RelationshipTypeBuilder
.createRelationshipTypeBuilder(context, publication, person, "isAuthorOfPublication",
"isPublicationOfAuthor", 0, null, 0,
null).withCopyToLeft(false).withCopyToRight(true).build();

RelationshipTypeBuilder.createRelationshipTypeBuilder(context, publication, project, "isProjectOfPublication",
"isPublicationOfProject", 0, null, 0,
null).withCopyToRight(true).build();

context.restoreAuthSystemState();
}

@@ -102,8 +129,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testSingleMdRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Person,," + col1.getHandle() + ",0",
"+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"};
"+,Person,," + col1.getHandle() + ",0",
"+,Publication,dc.identifier.other:0," + col1.getHandle() + ",1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -119,7 +146,7 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
performImportScript(csvLines, false);
Item[] items = new Item[csvLines.length - 1];
for (int i = 0; i < items.length; i++) {
items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString());
items[i] = itemService.findByIdOrLegacyId(context, getUUIDByIdentifierOther("" + i).toString());
}
return items;
}
@@ -132,8 +159,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testSingleRowNameRef() throws Exception {
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
"+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0",
"+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"};
"+,Test Item 1,Person,," + col1.getHandle() + ",idVal,0",
"+,Test Item 2,Publication,rowName:idVal," + col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -145,9 +172,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testMultiMdRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Person,," + col1.getHandle() + ",0",
"+,Person,," + col1.getHandle() + ",1",
"+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"};
"+,Person,," + col1.getHandle() + ",0",
"+,Person,," + col1.getHandle() + ",1",
"+,Publication,dc.identifier.other:0||dc.identifier.other:1," + col1.getHandle() + ",2"};
Item[] items = runImport(csv);
assertRelationship(items[2], items[0], 1, "left", 0);
assertRelationship(items[2], items[1], 1, "left", 1);
@@ -160,9 +187,9 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testMultiRowNameRef() throws Exception {
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other,rowName",
"+,Person,," + col1.getHandle() + ",0,val1",
"+,Person,," + col1.getHandle() + ",1,val2",
"+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"};
"+,Person,," + col1.getHandle() + ",0,val1",
"+,Person,," + col1.getHandle() + ",1,val2",
"+,Publication,rowName:val1||rowName:val2," + col1.getHandle() + ",2,val3"};
Item[] items = runImport(csv);
assertRelationship(items[2], items[0], 1, "left", 0);
assertRelationship(items[2], items[1], 1, "left", 1);
@@ -176,11 +203,16 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testSingleUUIDReference() throws Exception {
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.build();
.withTitle("Author1")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other",
"+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"};
"+,Publication," + person.getID().toString() + "," + col1.getHandle() + ",anything,0"};
Item[] items = runImport(csv);
assertRelationship(items[0], person, 1, "left", 0);
}
@@ -193,12 +225,21 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testMultiUUIDReference() throws Exception {
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Author1")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
Item person2 = ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.build();
context.restoreAuthSystemState();
.withTitle("Author2")
.withIssueDate("2017-10-17")
.withAuthor("Smith, John")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("John")
.withRelationshipType("Person")
.build();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName,dc.identifier.other",
"+,Publication," + person.getID().toString() + "||" + person2.getID().toString() + "," +
col1.getHandle() + ",anything,0"};
@@ -216,12 +257,16 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
"+,Person2,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"};
"+,Person2,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication,dc.title:Person||dc.title:Person2," + col1.getHandle() + ",anything,1"};
context.restoreAuthSystemState();
Item[] items = runImport(csv);
assertRelationship(items[1], person, 1, "left", 0);
@@ -238,16 +283,25 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
context.turnOffAuthorisationSystem();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.build();
Item person2 = ItemBuilder.createItem(context, col1)
.withTitle("Person2")
.withRelationshipType("Person")
.build();
.withTitle("Person2")
.withIssueDate("2017-10-17")
.withAuthor("Smith, John")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("John")
.withRelationshipType("Person")
.build();

context.restoreAuthSystemState();
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
"+,Person3,Person,," + col1.getHandle() + ",idVal,0",
"+,Person3,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication," + person.getID() + "||dc.title:Person2||rowName:idVal," +
col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
@@ -264,8 +318,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testRefWithSpecialChar() throws Exception {
String[] csv = {"id,dc.title,relationship.type,relation.isAuthorOfPublication,collection,rowName," +
"dc.identifier.other",
"+,Person:,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"};
"+,Person:,Person,," + col1.getHandle() + ",idVal,0",
"+,Pub1,Publication,dc.title:Person:," + col1.getHandle() + ",anything,1"};
Item[] items = runImport(csv);
assertRelationship(items[1], items[0], 1, "left", 0);
}
@@ -300,14 +354,25 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testNonUniqueMDRefInDb() throws Exception {
context.turnOffAuthorisationSystem();
ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();
ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();
Item person2 = ItemBuilder.createItem(context, col1)
.withTitle("Person2")
.withIssueDate("2017-10-17")
.withAuthor("Smith, John")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("John")
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();

context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Publication,dc.identifier.other:1," + col1.getHandle() + ",2"};
@@ -320,10 +385,15 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testNonUniqueMDRefInBoth() throws Exception {
context.turnOffAuthorisationSystem();
ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();
Item person = ItemBuilder.createItem(context, col1)
.withTitle("Person")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.withIdentifierOther("1")
.build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,dc.identifier.other",
"+,Person,," + col1.getHandle() + ",1",
@@ -382,8 +452,10 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testInvalidRelationshipArchivedOrigin() throws Exception {
context.turnOffAuthorisationSystem();
Item testItem = ItemBuilder.createItem(context, col1)
.withRelationshipType("OrgUnit")
.build();
.withTitle("OrgUnit")
.withIssueDate("2017-10-17")
.withRelationshipType("OrgUnit")
.build();
context.restoreAuthSystemState();
String[] csv = {"id,relationship.type,relation.isAuthorOfPublication,collection,rowName",
"+,Person,," + col1.getHandle() + ",1" +
@@ -398,6 +470,8 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
public void testInvalidRelationshipArchivedTarget() throws Exception {
context.turnOffAuthorisationSystem();
Item testItem = ItemBuilder.createItem(context, col1)
.withTitle("OrgUnit")
.withIssueDate("2017-10-17")
.withRelationshipType("OrgUnit")
.build();
context.restoreAuthSystemState();
@@ -413,26 +487,42 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test
public void testValidRelationshipNoDefinedTypesInCSV() throws Exception {
context.turnOffAuthorisationSystem();
Item testItemOne = ItemBuilder.createItem(context, col1)
.withRelationshipType("Person")
.withIdentifierOther("testItemOne")
.build();
Item testItemTwo = ItemBuilder.createItem(context, col1)
.withRelationshipType("Publication")
.withIdentifierOther("testItemTwo")
.build();
Item testItemThree = ItemBuilder.createItem(context, col1)
.withRelationshipType("Project")
.withIdentifierOther("testItemThree")
.build();

Item testItem = ItemBuilder.createItem(context, col1)
.withTitle("Person")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withPersonIdentifierLastName("Smith")
.withPersonIdentifierFirstName("Donald")
.withRelationshipType("Person")
.withIdentifierOther("testItemOne")
.build();

Item testItem2 = ItemBuilder.createItem(context, col1)
.withTitle("Publication")
.withIssueDate("2017-10-17")
.withRelationshipType("Publication")
.withIdentifierOther("testItemTwo")
.build();

Item testItem3 = ItemBuilder.createItem(context, col1)
.withTitle("Project")
.withIssueDate("2017-10-17")
.withRelationshipType("Project")
.withIdentifierOther("testItemThree")
.build();

context.restoreAuthSystemState();
String[] csv = {"id,relation.isAuthorOfPublication,relation.isPublicationOfProject,collection",
testItemOne.getID().toString() + ",,," + col1.getHandle(),
testItemTwo.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(),
testItemThree.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()};
testItem.getID().toString() + ",,," + col1.getHandle(),
testItem2.getID().toString() + ",dc.identifier.other:testItemOne,," + col1.getHandle(),
testItem3.getID().toString() + ",,dc.identifier.other:testItemTwo," + col1.getHandle()};
performImportScript(csv, false);
assertRelationship(testItemTwo, testItemOne, 1, "left", 0);
assertRelationship(testItemTwo, testItemThree, 1, "left", 0);
assertRelationship(testItem2, testItem, 1, "left", 0);
assertRelationship(testItem2, testItem3, 1, "left", 0);
}

/**
@@ -455,14 +545,17 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
@Test(expected = MetadataImportException.class)
public void testInvalidTypeNameDefined() throws Exception {
context.turnOffAuthorisationSystem();

Item testItem = ItemBuilder.createItem(context, col1)
.withRelationshipType("Publication")
.build();
.withTitle("Publication")
.withIssueDate("2017-10-17")
.withRelationshipType("Publication")
.build();
context.restoreAuthSystemState();
String[] csv = {"id,collection,relationship.type,dc.title," +
"relation.isProjectOfPublication,relation.isPublicationOfProject",
"+," + col1.getHandle() + ",Project,Title," +
testItem.getID().toString() + "," + testItem.getID().toString() };
testItem.getID().toString() + "," + testItem.getID().toString()};
performImportScript(csv, true);
}

@@ -477,17 +570,34 @@ public class CSVMetadataImportReferenceIT extends AbstractEntityIntegrationTest
}
out.flush();
out.close();
String fileLocation = csvFile.getAbsolutePath();
try {
String[] args = null;
if (validateOnly) {
return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com",
"-s", "-v");
args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s", "-v"};
} else {
return runDSpaceScript("metadata-import", "-f", csvFile.getAbsolutePath(), "-e", "admin@email.com",
"-s");
args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s",};
}
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();

ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);

DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null);
script.run();
}
if (testDSpaceRunnableHandler.getException() != null) {
throw testDSpaceRunnableHandler.getException();
}
} finally {
csvFile.delete();
}
return 0;
}

/**
@@ -146,6 +146,32 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
}
return (B) this;
}
/**
* Support method to grant the {@link Constants#ADMIN} permission over an object to a specific eperson
*
* @param dso
* the DSpaceObject on which to grant the permission
* @param eperson
* the eperson that will be granted the permission
* @return the builder properly configured to build the object with the additional admin permission
*/
protected <B extends AbstractDSpaceObjectBuilder<T>> B setAdminPermission(DSpaceObject dso, EPerson eperson,
Date startDate) {
try {

ResourcePolicy rp = authorizeService.createOrModifyPolicy(null, context, null, null,
eperson, startDate, Constants.ADMIN,
"Integration Test", dso);
if (rp != null) {
resourcePolicyService.update(context, rp);
}
} catch (Exception e) {
return handleException(e);
}
return (B) this;

}

/**
* Support method to grant {@link Constants#REMOVE} permission to a specific eperson

@@ -19,6 +19,7 @@ import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;

/**
@@ -126,6 +127,19 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
return this;
}

/**
* Grant administrator permission on this item to the specified eperson
*
* @param ePerson the eperson to grant administrator permission to
* @return this builder
* @throws SQLException
* @throws AuthorizeException
*/
public ItemBuilder withAdminUser(EPerson ePerson) throws SQLException, AuthorizeException {
return setAdminPermission(item, ePerson, null);
}

@Override
public Item build() {
try {

@@ -166,6 +166,11 @@ public class WorkspaceItemBuilder extends AbstractBuilder<WorkspaceItem, Workspa
public WorkspaceItemBuilder withAbstract(final String subject) {
return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject);
}

public WorkspaceItemBuilder withRelationshipType(final String relationshipType) {
return addMetadataValue("relationship", "type", null, relationshipType);
}

public WorkspaceItemBuilder grantLicense() {
Item item = workspaceItem.getItem();
String license;

76
dspace-api/src/test/java/org/dspace/curate/CurationTest.java
Normal file
@@ -0,0 +1,76 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;

import org.apache.commons.cli.ParseException;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ScriptService;
import org.junit.Test;

public class CurationTest extends AbstractIntegrationTestWithDatabase {

@Test(expected = ParseException.class)
public void curationWithoutEPersonParameterTest() throws Exception {

context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context)
.build();
Collection collection = CollectionBuilder.createCollection(context, community)
.build();
context.restoreAuthSystemState();
String[] args = new String[] {"curate", "-t", CurationClientOptions.getTaskOptions().get(0),
"-i", collection.getHandle()};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();

ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);

DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null);
script.run();
}
}

@Test
public void curationWithEPersonParameterTest() throws Exception {

context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context)
.build();
Collection collection = CollectionBuilder.createCollection(context, community)
.build();
context.restoreAuthSystemState();
String[] args = new String[] {"curate", "-e", "admin@email.com", "-t",
CurationClientOptions.getTaskOptions().get(0), "-i", collection.getHandle()};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();

ScriptService scriptService = ScriptServiceFactory.getInstance().getScriptService();
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(args[0]);

DSpaceRunnable script = null;
if (scriptConfiguration != null) {
script = scriptService.createDSpaceRunnableForScriptConfiguration(scriptConfiguration);
}
if (script != null) {
script.initialize(args, testDSpaceRunnableHandler, null);
script.run();
}
}
}

@@ -54,7 +54,7 @@ public class CuratorTest extends AbstractUnitTest {
*/
@Test
public void testCurate_DSpaceObject() throws Exception {
System.out.println("curate");
CoreServiceFactory.getInstance().getPluginService().clearNamedPluginClasses();

final String TASK_NAME = "dummyTask";

@@ -114,6 +114,6 @@ public class CuratorTest extends AbstractUnitTest {
curator.curate(context, item);

assertEquals(Curator.CURATE_SUCCESS, curator.getStatus(TASK_NAME));
assertEquals(reporterOutput.toString(), "No operation performed on testHandle");
assertEquals("No operation performed on testHandle", reporterOutput.toString());
}
}

@@ -0,0 +1,93 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;

import static org.junit.Assert.assertTrue;

import org.apache.solr.common.SolrInputDocument;
import org.dspace.AbstractUnitTest;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.discovery.indexobject.IndexableMetadataField;
import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl;
import org.junit.Test;

/**
* Test class for {@link MetadataFieldIndexFactoryImpl}
*
* @author Maria Verdonck (Atmire) on 23/07/2020
*/
public class MetadataFieldIndexFactoryImplTest extends AbstractUnitTest {
private MetadataSchemaService metadataSchemaService =
ContentServiceFactory.getInstance().getMetadataSchemaService();
private MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();

private String schemaName = "schema1";
private String elemName1 = "elem1";
private String elemName2 = "elem2";
private String qualName1 = "qual1";

private MetadataSchema schema;
private MetadataField field1;
private MetadataField field2;

@Test
public void test_buildDocument_withQualifier() throws Exception {
context.turnOffAuthorisationSystem();
schema = metadataSchemaService.create(context, schemaName, "htpp://test/schema/");
field1 = metadataFieldService.create(context, schema, elemName1, qualName1, "note 1");

MetadataFieldIndexFactoryImpl fieldIndexFactory = new MetadataFieldIndexFactoryImpl();
IndexableMetadataField indexableMetadataField = new IndexableMetadataField(this.field1);
SolrInputDocument solrInputDocument = fieldIndexFactory.buildDocument(context, indexableMetadataField);

assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME + "_keyword")
.contains(this.field1.getMetadataSchema().getName()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME + "_keyword")
.contains(this.field1.getElement()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.QUALIFIER_FIELD_NAME + "_keyword")
.contains(this.field1.getQualifier()));

assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword")
.contains(this.field1.getQualifier()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword")
.contains(this.field1.getElement() + "." + this.field1.getQualifier()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword")
.contains(this.field1.toString('.')));

metadataSchemaService.delete(context, schema);
metadataFieldService.delete(context, field1);
context.restoreAuthSystemState();
}

@Test
public void test_buildDocument_noQualifier() throws Exception {
context.turnOffAuthorisationSystem();
schema = metadataSchemaService.create(context, schemaName, "htpp://test/schema/");
field2 = metadataFieldService.create(context, schema, elemName2, null, "note 2");
MetadataFieldIndexFactoryImpl fieldIndexFactory = new MetadataFieldIndexFactoryImpl();
IndexableMetadataField indexableMetadataField = new IndexableMetadataField(this.field2);
SolrInputDocument solrInputDocument = fieldIndexFactory.buildDocument(context, indexableMetadataField);
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME + "_keyword")
.contains(this.field2.getMetadataSchema().getName()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME + "_keyword")
.contains(this.field2.getElement()));

assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword")
.contains(this.field2.getElement()));
assertTrue(solrInputDocument.getFieldValues(MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS + "_keyword")
.contains(this.field2.toString('.')));

metadataSchemaService.delete(context, schema);
metadataFieldService.delete(context, field2);
context.restoreAuthSystemState();
}
}
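
Taken together, the assertions in the test class above pin down the name variations the factory indexes: for a qualified field like schema1.elem1.qual1, the _keyword variations include the qualifier alone, element.qualifier, and the full schema.element.qualifier form, while an unqualified field contributes the element and schema.element forms, presumably so a metadata field can be matched by any suffix of its name.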

@@ -78,6 +78,32 @@ public class EPersonTest extends AbstractUnitTest {
super.destroy();
}

@Test
public void testPreferences() throws Exception {

String cookies =
"{" +
"\"token_item\":true," +
"\"impersonation\":true," +
"\"redirect\":true," +
"\"language\":true," +
"\"klaro\":true," +
"\"google-analytics\":false" +
"}";

ePersonService.addMetadata(context, eperson, "dspace", "agreements", "cookies", null, cookies);
ePersonService.addMetadata(context, eperson, "dspace", "agreements", "end-user", null, "true");
context.commit();

assertEquals(
cookies,
ePersonService.getMetadataFirstValue(eperson, "dspace", "agreements", "cookies", null)
);
assertEquals(
"true",
ePersonService.getMetadataFirstValue(eperson, "dspace", "agreements", "end-user", null)
);
}

/**
* Test of equals method, of class EPerson.

@@ -116,12 +116,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
@Test
public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection);
assertEquals("defaultWorkflow", workflow.getID());
assertEquals(XmlWorkflowFactoryImpl.LEGACY_WORKFLOW_NAME, workflow.getID());
}

@Test
public void workflowMapping_MappedCollection() throws WorkflowConfigurationException {
Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection);
assertEquals("selectSingleReviewer", workflow.getID());
assertEquals( "selectSingleReviewer", workflow.getID());
}
}

@@ -7,6 +7,8 @@
*/
package org.dspace.app.rest;

import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import javax.servlet.Filter;

@@ -16,6 +18,7 @@ import org.dspace.app.rest.parameter.resolver.SearchFilterResolver;
import org.dspace.app.rest.utils.ApplicationConfig;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer;
import org.dspace.app.sitemap.GenerateSitemaps;
import org.dspace.app.util.DSpaceContextListener;
import org.dspace.utils.servlet.DSpaceWebappServletFilter;
import org.slf4j.Logger;
@@ -28,6 +31,8 @@ import org.springframework.context.annotation.Bean;
import org.springframework.core.annotation.Order;
import org.springframework.hateoas.server.LinkRelationProvider;
import org.springframework.lang.NonNull;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.web.context.request.RequestContextListener;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
@@ -49,6 +54,7 @@ import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
* @author Tim Donohue
*/
@SpringBootApplication
@EnableScheduling
public class Application extends SpringBootServletInitializer {

private static final Logger log = LoggerFactory.getLogger(Application.class);
@@ -56,6 +62,11 @@ public class Application extends SpringBootServletInitializer {
@Autowired
private ApplicationConfig configuration;

@Scheduled(cron = "${sitemap.cron:-}")
public void generateSitemap() throws IOException, SQLException {
GenerateSitemaps.generateSitemapsScheduled();
}

/**
* Override the default SpringBootServletInitializer.configure() method,
* passing it this Application class.
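
A note on the @Scheduled method above: the cron expression is read from the sitemap.cron property, and the default after the colon in ${sitemap.cron:-} is "-", which Spring treats as a disabled trigger, so the scheduled regeneration stays off until the property is set. As a hedged example (the property name comes from the annotation above; the value and its six-field Spring cron syntax are an assumption):

sitemap.cron = 0 15 1 * * *

would regenerate the sitemaps daily at 01:15 server time via GenerateSitemaps.generateSitemapsScheduled().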

@@ -0,0 +1,148 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.catalina.connector.ClientAbortException;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.rest.utils.MultipartFileSender;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;

/**
* This is a specialized controller to provide access to the sitemap files, generated by
* {@link org.dspace.app.sitemap.GenerateSitemaps}
*
* The mapping for the requested endpoint tries to resolve a valid sitemap file name, for example
* <pre>
* {@code
* https://<dspace.server.url>/sitemaps/26453b4d-e513-44e8-8d5b-395f62972eff/sitemap0.html
* }
* </pre>
*
* @author Maria Verdonck (Atmire) on 08/07/2020
*/
@Controller
@RequestMapping("/${sitemap.path:sitemaps}")
public class SitemapRestController {

private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SitemapRestController.class);

@Autowired
ConfigurationService configurationService;

// Most file systems are configured to use block sizes of 4096 or 8192 and our buffer should be a multiple of that.
private static final int BUFFER_SIZE = 4096 * 10;
private static final int BUFFER_SIZE = 4096 * 10;
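// Illustrative note: 4096 * 10 = 40960 bytes, which is 10 x 4096 and 5 x 8192, so it satisfies both common block sizes.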

/**
* Tries to retrieve a matching sitemap file in configured location
*
* @param name the name of the requested sitemap file
* @param response the HTTP response
* @param request the HTTP request
* @throws SQLException if db error while completing DSpace context
* @throws IOException if IO error surrounding sitemap file
*/
@GetMapping("/{name}")
public void retrieve(@PathVariable String name, HttpServletResponse response,
HttpServletRequest request) throws IOException, SQLException {
// Find sitemap with given name in dspace/sitemaps
File foundSitemapFile = null;
File sitemapOutputDir = new File(configurationService.getProperty("sitemap.dir"));
if (sitemapOutputDir.exists() && sitemapOutputDir.isDirectory()) {
// List of all files and directories inside sitemapOutputDir
File sitemapFilesList[] = sitemapOutputDir.listFiles();
for (File sitemapFile : sitemapFilesList) {
if (name.equalsIgnoreCase(sitemapFile.getName())) {
if (sitemapFile.isFile()) {
foundSitemapFile = sitemapFile;
} else {
throw new ResourceNotFoundException(
"Directory with name " + name + " in " + sitemapOutputDir.getAbsolutePath() +
" found, but no file.");
}
}
}
} else {
throw new ResourceNotFoundException(
"Sitemap directory in " + sitemapOutputDir.getAbsolutePath() + " does not " +
"exist, either sitemaps have not been generated (./dspace generate-sitemaps)," +
" or are located elsewhere (config used: sitemap.dir).");
}
if (foundSitemapFile == null) {
throw new ResourceNotFoundException(
"Could not find sitemap file with name " + name + " in " + sitemapOutputDir.getAbsolutePath());
} else {
// return found sitemap file
this.returnSitemapFile(foundSitemapFile, response, request);
}
}

/**
* Sends back the matching sitemap file as a MultipartFile, with the headers set with details of the file
* (content, size, name, last modified)
*
* @param foundSitemapFile the found sitemap file, with matching name as in request path
* @param response the HTTP response
* @param request the HTTP request
* @throws SQLException if db error while completing DSpace context
* @throws IOException if IO error surrounding sitemap file
*/
private void returnSitemapFile(File foundSitemapFile, HttpServletResponse response, HttpServletRequest request)
throws SQLException, IOException {
// Pipe the bits
try (InputStream is = new FileInputStream(foundSitemapFile)) {
MultipartFileSender sender = MultipartFileSender
.fromInputStream(is)
.withBufferSize(BUFFER_SIZE)
.withFileName(foundSitemapFile.getName())
.withLength(foundSitemapFile.length())
.withMimetype(Files.probeContentType(foundSitemapFile.toPath()))
.with(request)
.with(response);

sender.withLastModified(foundSitemapFile.lastModified());

// Determine if we need to send the file as a download or if the browser can open it inline
long dispositionThreshold = configurationService.getLongProperty("webui.content_disposition_threshold");
if (dispositionThreshold >= 0 && foundSitemapFile.length() > dispositionThreshold) {
sender.withDisposition(MultipartFileSender.CONTENT_DISPOSITION_ATTACHMENT);
}

Context context = ContextUtil.obtainContext(request);

// We have all the data we need, close the connection to the database so that it doesn't stay open during
// download/streaming
context.complete();

// Send the data
if (sender.isValid()) {
sender.serveResource();
}

} catch (ClientAbortException e) {
log.debug("Client aborted the request before the download was completed. " +
"Client is probably switching to a Range request.", e);
}
}
}
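
As a usage sketch for the controller above (the host placeholder is patterned on the class javadoc, not a real value):

curl -O https://<dspace.server.url>/sitemaps/sitemap0.xml

would stream sitemap0.xml from the directory configured by sitemap.dir, with the byte serving handled by MultipartFileSender; files larger than webui.content_disposition_threshold are sent as attachments rather than inline.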

@@ -12,13 +12,11 @@ import java.util.List;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;

import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.WorkflowDefinitionRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.repository.AbstractDSpaceRestRepository;
import org.dspace.app.rest.repository.LinkRestRepository;
import org.dspace.app.rest.utils.Utils;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.xmlworkflow.factory.XmlWorkflowFactory;
@@ -43,12 +41,6 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR
@Autowired
protected XmlWorkflowFactory xmlWorkflowFactory;

@Autowired
protected ConverterService converter;

@Autowired
protected Utils utils;

/**
* GET endpoint that returns the list of collections that make an explicit use of the workflow-definition.
* If a collection doesn't specify the workflow-definition to be used, the default mapping applies,
@@ -69,10 +61,10 @@ public class WorkflowDefinitionCollectionsLinkRepository extends AbstractDSpaceR
if (xmlWorkflowFactory.isDefaultWorkflow(workflowName)) {
collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getAllNonMappedCollectionsHandles(context));
}
collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorklow(context,
collectionsMappedToWorkflow.addAll(xmlWorkflowFactory.getCollectionHandlesMappedToWorkflow(context,
workflowName));
Pageable pageable = optionalPageable != null ? optionalPageable : PageRequest.of(0, 20);
return converter.toRestPage(collectionsMappedToWorkflow, pageable,
return super.converter.toRestPage(collectionsMappedToWorkflow, pageable,
projection);
} else {
throw new ResourceNotFoundException("No workflow with name " + workflowName + " is configured");

@@ -14,11 +14,13 @@ import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.utils.Utils;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -53,6 +55,10 @@ public class AdministratorOfFeature implements AuthorizationFeature {
Collection collection = (Collection) utils.getDSpaceAPIObjectFromRest(context, object);
return authService.isAdmin(context, collection);
}
if (object instanceof ItemRest) {
Item item = (Item) utils.getDSpaceAPIObjectFromRest(context, object);
return authService.isAdmin(context, item);
}
}
return authService.isAdmin(context);
}
@@ -62,7 +68,8 @@ public class AdministratorOfFeature implements AuthorizationFeature {
return new String[]{
SiteRest.CATEGORY + "." + SiteRest.NAME,
CommunityRest.CATEGORY + "." + CommunityRest.NAME,
CollectionRest.CATEGORY + "." + CollectionRest.NAME
CollectionRest.CATEGORY + "." + CollectionRest.NAME,
ItemRest.CATEGORY + "." + ItemRest.NAME
};
}
}

@@ -34,6 +34,7 @@ import org.dspace.app.rest.security.DSpacePermissionEvaluator;
import org.dspace.app.rest.security.WebSecurityExpressionEvaluator;
import org.dspace.app.rest.utils.Utils;
import org.dspace.services.RequestService;
import org.springframework.aop.support.AopUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
@@ -51,6 +52,8 @@ import org.springframework.stereotype.Service;
/**
* Converts domain objects from the DSpace service layer to rest objects, and from rest objects to resource
* objects, applying {@link Projection}s where applicable.
*
* @author Luca Giamminonni (luca.giamminonni at 4science dot it)
*/
@Service
public class ConverterService {
@@ -149,14 +152,30 @@ public class ConverterService {
DSpaceRestRepository repositoryToUse = utils
.getResourceRepositoryByCategoryAndModel(baseObjectRest.getCategory(), baseObjectRest.getType());
Annotation preAuthorize = null;
for (Method m : repositoryToUse.getClass().getMethods()) {
int maxDepth = 0;
// DS-4530 exclude the AOP Proxy from determining the annotations
for (Method m : AopUtils.getTargetClass(repositoryToUse).getMethods()) {
if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) {
preAuthorize = AnnotationUtils.findAnnotation(m, PreAuthorize.class);
int depth = howManySuperclass(m.getDeclaringClass());
if (depth > maxDepth) {
preAuthorize = AnnotationUtils.findAnnotation(m, PreAuthorize.class);
maxDepth = depth;
}
}
}
return preAuthorize;
}

private int howManySuperclass(Class<?> declaringClass) {
Class curr = declaringClass;
int count = 0;
while (curr != Object.class) {
curr = curr.getSuperclass();
count++;
}
return count;
}

private Annotation getDefaultFindOnePreAuthorize() {
for (Method m : DSpaceRestRepository.class.getMethods()) {
if (StringUtils.equalsIgnoreCase(m.getName(), "findOne")) {
|
||||
|
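Reviewer note: two things happen in this hunk and both deserve a sketch. First, calling getClass() on a Spring AOP proxy returns the proxy type, whose methods do not carry the target's @PreAuthorize annotations (the DS-4530 fix). Second, once the scan runs over the real class, findOne can be declared at several levels of the repository hierarchy, so the deepest declaration, i.e. the most specific override, must win. A minimal illustration (class names are hypothetical):

    // On a proxied repository bean:
    Class<?> proxyClass = repositoryToUse.getClass();                // e.g. a CGLIB/JDK proxy class
    Class<?> targetClass = AopUtils.getTargetClass(repositoryToUse); // e.g. the real repository class
    // howManySuperclass(...) ranks declarations by inheritance depth, so a findOne
    // declared in a concrete repository (deeper) overrides the @PreAuthorize found
    // on the findOne inherited from the DSpaceRestRepository base class (shallower).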
@@ -40,6 +40,8 @@ import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExcep
 * @author Tom Desair (tom dot desair at atmire dot com)
 * @author Frederic Van Reet (frederic dot vanreet at atmire dot com)
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 *
 */
@ControllerAdvice
public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionHandler {
@@ -18,6 +18,7 @@ import org.dspace.app.rest.RestResourceController;
 */
public class MetadataFieldRest extends BaseObjectRest<Integer> {
    public static final String NAME = "metadatafield";
    public static final String NAME_PLURAL = "metadatafields";
    public static final String CATEGORY = RestAddressableModel.CORE;

    @JsonIgnore
@@ -89,10 +89,10 @@ public class ExternalSourceRestRepository extends DSpaceRestRepository<ExternalS
    }

    @Override
    @PreAuthorize("permitAll()")
    public Page<ExternalSourceRest> findAll(Context context, Pageable pageable) {
        List<ExternalDataProvider> externalSources = externalDataService.getExternalDataProviders();
        return converter.toRestPage(externalSources, pageable, externalSources.size(),
            utils.obtainProjection());
        return converter.toRestPage(externalSources, pageable, utils.obtainProjection());
    }

    public Class<ExternalSourceRest> getDomainClass() {
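Reviewer note: the dropped externalSources.size() argument is the point of this hunk. The old overload treated the full list as a single page with an explicit total, so the pageable was never applied; the two-argument-plus-projection overload (semantics assumed from this diff) slices the list by the pageable and derives the total itself.

    // Old call: the whole list came back as one "page", with an explicit total
    return converter.toRestPage(externalSources, pageable, externalSources.size(), utils.obtainProjection());
    // New call: the converter applies the pageable to the list and computes the total
    return converter.toRestPage(externalSources, pageable, utils.obtainProjection());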
@@ -9,9 +9,11 @@ package org.dspace.app.rest.repository;

import static java.lang.Integer.parseInt;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_EQUALS;

import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
@@ -19,6 +21,8 @@ import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
@@ -31,6 +35,13 @@ import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.indexobject.IndexableMetadataField;
import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -45,6 +56,10 @@ import org.springframework.stereotype.Component;
 */
@Component(MetadataFieldRest.CATEGORY + "." + MetadataFieldRest.NAME)
public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFieldRest, Integer> {
    /**
     * log4j logger
     */
    private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataFieldRestRepository.class);

    @Autowired
    MetadataFieldService metadataFieldService;
@@ -52,6 +67,9 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
    @Autowired
    MetadataSchemaService metadataSchemaService;

    @Autowired
    private SearchService searchService;

    @Override
    @PreAuthorize("permitAll()")
    public MetadataFieldRest findOne(Context context, Integer id) {
@@ -79,7 +97,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi

    @SearchRestMethod(name = "bySchema")
    public Page<MetadataFieldRest> findBySchema(@Parameter(value = "schema", required = true) String schemaName,
        Pageable pageable) {
                                                Pageable pageable) {
        try {
            Context context = obtainContext();
            MetadataSchema schema = metadataSchemaService.find(context, schemaName);
@@ -93,6 +111,108 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
        }
    }

    /**
     * Endpoint for searching the {@link MetadataField} objects by various params representing the field name;
     * the query param performs a partial match on the field name.
     *
     * @param schemaName    an exact match of the prefix of the metadata schema (e.g. "dc", "dcterms", "eperson")
     * @param elementName   an exact match of the field's element (e.g. "contributor", "title")
     * @param qualifierName an exact match of the field's qualifier (e.g. "author", "alternative")
     * @param query         part of the fully qualified field; should start with the start of the schema, element or
     *                      qualifier (e.g. "dc.ti", "contributor", "auth", "contributor.ot")
     * @param exactName     the exact fully qualified field name, using the syntax schema.element.qualifier, or
     *                      schema.element if no qualifier exists (e.g. "dc.title", "dc.contributor.author");
     *                      at most one value is returned, and only on an exact match
     * @param pageable      the pagination options
     * @return List of {@link MetadataFieldRest} objects representing all {@link MetadataField} objects that match
     *         the given params
     */
    @SearchRestMethod(name = "byFieldName")
    public Page<MetadataFieldRest> findByFieldName(@Parameter(value = "schema", required = false) String schemaName,
            @Parameter(value = "element", required = false) String elementName,
            @Parameter(value = "qualifier", required = false) String qualifierName,
            @Parameter(value = "query", required = false) String query,
            @Parameter(value = "exactName", required = false) String exactName,
            Pageable pageable) throws SQLException {
        Context context = obtainContext();

        List<MetadataField> matchingMetadataFields = new ArrayList<>();

        if (StringUtils.isBlank(exactName)) {
            // Find matches in Solr Search core
            DiscoverQuery discoverQuery =
                this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query);
            try {
                DiscoverResult searchResult = searchService.search(context, null, discoverQuery);
                for (IndexableObject object : searchResult.getIndexableObjects()) {
                    if (object instanceof IndexableMetadataField) {
                        matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject());
                    }
                }
            } catch (SearchServiceException e) {
                log.error("Error while searching with Discovery", e);
                throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage());
            }
        } else {
            if (StringUtils.isNotBlank(elementName) || StringUtils.isNotBlank(qualifierName) ||
                StringUtils.isNotBlank(schemaName) || StringUtils.isNotBlank(query)) {
                throw new UnprocessableEntityException("Use either exactName or a combination of element, qualifier " +
                    "and schema to search discovery for metadata fields");
            }
            // Find at most one match with exactName query param in DB
            MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.');
            if (exactMatchingMdField != null) {
                matchingMetadataFields.add(exactMatchingMdField);
            }
        }

        return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection());
    }

    /**
     * Creates a discovery query containing the filter queries derived from the request params
     *
     * @param context       Context request
     * @param schemaName    an exact match of the prefix of the metadata schema (e.g. "dc", "dcterms", "eperson")
     * @param elementName   an exact match of the field's element (e.g. "contributor", "title")
     * @param qualifierName an exact match of the field's qualifier (e.g. "author", "alternative")
     * @param query         part of the fully qualified field; should start with the start of the schema, element or
     *                      qualifier (e.g. "dc.ti", "contributor", "auth", "contributor.ot")
     * @return Discover query containing the filter queries derived from the request params
     * @throws SQLException If DB error
     */
    private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName,
            String qualifierName, String query) throws SQLException {
        List<String> filterQueries = new ArrayList<>();
        if (StringUtils.isNotBlank(query)) {
            if (query.split("\\.").length > 3) {
                throw new IllegalArgumentException("Query param should not contain more than 2 dot (.) separators, " +
                    "as it forms a schema.element.qualifier metadata field name");
            }
            filterQueries.add(searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.FIELD_NAME_VARIATIONS,
                OPERATOR_EQUALS, query).getFilterQuery() + "*");
        }
        if (StringUtils.isNotBlank(schemaName)) {
            filterQueries.add(
                searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.SCHEMA_FIELD_NAME, OPERATOR_EQUALS,
                    schemaName).getFilterQuery());
        }
        if (StringUtils.isNotBlank(elementName)) {
            filterQueries.add(
                searchService.toFilterQuery(context, MetadataFieldIndexFactoryImpl.ELEMENT_FIELD_NAME, OPERATOR_EQUALS,
                    elementName).getFilterQuery());
        }
        if (StringUtils.isNotBlank(qualifierName)) {
            filterQueries.add(searchService
                .toFilterQuery(context, MetadataFieldIndexFactoryImpl.QUALIFIER_FIELD_NAME, OPERATOR_EQUALS,
                    qualifierName).getFilterQuery());
        }

        DiscoverQuery discoverQuery = new DiscoverQuery();
        discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()]));
        return discoverQuery;
    }

    @Override
    public Class<MetadataFieldRest> getDomainClass() {
        return MetadataFieldRest.class;
@@ -101,15 +221,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
    @Override
    @PreAuthorize("hasAuthority('ADMIN')")
    protected MetadataFieldRest createAndReturn(Context context)
        throws AuthorizeException, SQLException {
            throws AuthorizeException, SQLException {

        // parse request body
        MetadataFieldRest metadataFieldRest;
        try {
            metadataFieldRest = new ObjectMapper().readValue(
                getRequestService().getCurrentRequest().getHttpServletRequest().getInputStream(),
                MetadataFieldRest.class
            );
                getRequestService().getCurrentRequest().getHttpServletRequest().getInputStream(),
                MetadataFieldRest.class
            );
        } catch (IOException excIO) {
            throw new DSpaceBadRequestException("error parsing request body", excIO);
        }
@@ -133,14 +253,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
        MetadataField metadataField;
        try {
            metadataField = metadataFieldService.create(context, schema,
                metadataFieldRest.getElement(), metadataFieldRest.getQualifier(), metadataFieldRest.getScopeNote());
                metadataFieldRest.getElement(), metadataFieldRest.getQualifier(), metadataFieldRest.getScopeNote());
            metadataFieldService.update(context, metadataField);
        } catch (NonUniqueMetadataException e) {
            throw new UnprocessableEntityException(
                "metadata field "
                    + schema.getName() + "." + metadataFieldRest.getElement()
                    + (metadataFieldRest.getQualifier() != null ? "." + metadataFieldRest.getQualifier() : "")
                    + " already exists"
                "metadata field "
                    + schema.getName() + "." + metadataFieldRest.getElement()
                    + (metadataFieldRest.getQualifier() != null ? "." + metadataFieldRest.getQualifier() : "")
                    + " already exists"
            );
        } catch (IOException e) {
            throw new RuntimeException(e);
@@ -170,7 +290,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
    @Override
    @PreAuthorize("hasAuthority('ADMIN')")
    protected MetadataFieldRest put(Context context, HttpServletRequest request, String apiCategory, String model,
        Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {
                                    Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {

        MetadataFieldRest metadataFieldRest = new Gson().fromJson(jsonNode.toString(), MetadataFieldRest.class);

@@ -196,9 +316,11 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
            context.commit();
        } catch (NonUniqueMetadataException e) {
            throw new UnprocessableEntityException("metadata field "
                + metadataField.getMetadataSchema().getName() + "." + metadataFieldRest.getElement()
                + (metadataFieldRest.getQualifier() != null ? "." + metadataFieldRest.getQualifier() : "")
                + " already exists");
                + metadataField.getMetadataSchema().getName() + "." +
                metadataFieldRest.getElement()
                + (metadataFieldRest.getQualifier() != null ?
                    "." + metadataFieldRest.getQualifier() : "")
                + " already exists");
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
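Reviewer note: for anyone exercising the new search from a test, a minimal MockMvc sketch (the endpoint path matches the SEARCH_BYFIELDNAME_ENDPOINT constant in the integration test further below; the field name is illustrative). As the code above shows, exactName cannot be combined with schema, element, qualifier, or query, or the endpoint answers 422 Unprocessable Entity.

    getClient().perform(get("/api/core/metadatafields/search/byFieldName")
            .param("exactName", "dc.title"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.page.totalElements", is(1)));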
@@ -33,7 +33,7 @@ import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.handler.DSpaceRunnableHandler;
import org.dspace.scripts.service.ProcessService;
import org.dspace.utils.DSpace;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.core.task.TaskExecutor;

/**
 * The {@link DSpaceRunnableHandler} dealing with Scripts started from the REST api
@@ -231,9 +231,8 @@ public class RestDSpaceRunnableHandler implements DSpaceRunnableHandler {
     * @param script The script to be run
     */
    public void schedule(DSpaceRunnable script) {
        ThreadPoolTaskExecutor taskExecutor = new DSpace().getServiceManager()
            .getServiceByName("dspaceRunnableThreadExecutor",
                ThreadPoolTaskExecutor.class);
        TaskExecutor taskExecutor = new DSpace().getServiceManager()
            .getServiceByName("dspaceRunnableThreadExecutor", TaskExecutor.class);
        Context context = new Context();
        try {
            Process process = processService.find(context, processId);
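Reviewer note: widening the lookup from ThreadPoolTaskExecutor to the TaskExecutor interface is what lets the test context (see the Spring XML below) substitute a SyncTaskExecutor, which runs scheduled scripts inline on the calling thread so integration tests see their results synchronously. A minimal sketch of the lookup under that assumption:

    // Fetch by interface: ThreadPoolTaskExecutor (production) and
    // SyncTaskExecutor (tests) both satisfy this.
    TaskExecutor taskExecutor = new DSpace().getServiceManager()
            .getServiceByName("dspaceRunnableThreadExecutor", TaskExecutor.class);
    taskExecutor.execute(scriptTask); // scriptTask: a hypothetical Runnable wrapping the script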
@@ -8,6 +8,7 @@
package org.dspace.app.rest.submit.factory.impl;

import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

import org.dspace.app.rest.model.MetadataValueRest;
@@ -40,17 +41,10 @@ public abstract class MetadataValueRemovePatchOperation<DSO extends DSpaceObject
        String[] metadata = Utils.tokenize(target);
        List<MetadataValue> mm = getDSpaceObjectService().getMetadata(source, metadata[0], metadata[1], metadata[2],
            Item.ANY);
        getDSpaceObjectService().clearMetadata(context, source, metadata[0], metadata[1], metadata[2], Item.ANY);
        if (index != -1) {
            int idx = 0;
            for (MetadataValue m : mm) {
                if (idx != index) {
                    getDSpaceObjectService().addMetadata(context, source, metadata[0], metadata[1], metadata[2],
                        m.getLanguage(), m.getValue(), m.getAuthority(),
                        m.getConfidence());
                }
                idx++;
            }
            getDSpaceObjectService().removeMetadataValues(context, source, Arrays.asList(mm.get(index)));
        } else {
            getDSpaceObjectService().clearMetadata(context, source, metadata[0], metadata[1], metadata[2], Item.ANY);
        }
    }
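Reviewer note: the rewritten branch removes exactly one value instead of clearing everything and re-adding the survivors, which also preserves the untouched values' identity. A minimal sketch of the kind of JSON Patch request that drives this path (the workspace item id and section path are illustrative):

    // Remove the second dc.subject value (index 1) from a hypothetical workspace item
    String patchBody = "[{\"op\": \"remove\", \"path\": \"/sections/traditionalpageone/dc.subject/1\"}]";
    getClient(token).perform(patch("/api/submission/workspaceitems/" + wsItemId)
            .content(patchBody)
            .contentType("application/json-patch+json"))
            .andExpect(status().isOk());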
@@ -156,9 +156,13 @@ public class MultipartFileSender {
        // Initialize response.
        response.reset();
        response.setBufferSize(bufferSize);
        response.setHeader(CONTENT_TYPE, contentType);
        if (contentType != null) {
            response.setHeader(CONTENT_TYPE, contentType);
        }
        response.setHeader(ACCEPT_RANGES, BYTES);
        response.setHeader(ETAG, checksum);
        if (checksum != null) {
            response.setHeader(ETAG, checksum);
        }
        response.setDateHeader(LAST_MODIFIED, lastModified);
        response.setDateHeader(EXPIRES, System.currentTimeMillis() + DEFAULT_EXPIRE_TIME);

@@ -481,4 +485,4 @@ public class MultipartFileSender {
        return Arrays.binarySearch(matchValues, toMatch) > -1 || Arrays.binarySearch(matchValues, "*") > -1;
    }

}
}
@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">

    <bean id="dspaceRunnableThreadExecutor" class="org.springframework.core.task.SyncTaskExecutor"/>

    <!-- The primary attribute is present to ensure that in the REST layer we'll always use this
         bean if it is present -->
    <bean id="metadata-import" class="org.dspace.app.bulkedit.MetadataImportScriptConfiguration" primary="true">
        <property name="description" value="Import metadata after batch editing" />
        <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataImport"/>
    </bean>

    <bean id="metadata-export" class="org.dspace.app.bulkedit.MetadataExportScriptConfiguration" primary="true">
        <property name="description" value="Export metadata for batch editing"/>
        <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
    </bean>

    <bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
        <property name="description" value="Curation tasks"/>
        <property name="dspaceRunnableClass" value="org.dspace.curate.Curation"/>
    </bean>
</beans>
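Reviewer note: this test-only context pairs with the TaskExecutor change above: the SyncTaskExecutor bean reuses the production bean name, and primary="true" on the script configurations makes them win over the default definitions when the contexts are merged. A minimal sketch of a lookup that sees the merged result (assuming the service manager's getServicesByType and the ScriptConfiguration base type):

    // Returns all script configurations; the primary="true" definitions from this
    // test context take precedence over same-named production beans.
    List<ScriptConfiguration> configs = new DSpace().getServiceManager()
            .getServicesByType(ScriptConfiguration.class);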
@@ -1637,6 +1637,65 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
            .andExpect(status().isOk());
    }

    @Test
    public void findByObjectAndFeatureFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();
        Community com = CommunityBuilder.createCommunity(context).withName("A test community").build();
        CommunityRest comRest = communityConverter.convert(com, DefaultProjection.DEFAULT);
        String comUri = utils.linkToSingleResource(comRest, "self").getHref();
        context.restoreAuthSystemState();

        String adminToken = getAuthToken(admin.getEmail(), password);

        // verify that it works for administrators - with eperson parameter
        getClient(adminToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", comUri)
                .param("projection", "full")
                .param("feature", alwaysTrue.getName())
                .param("eperson", admin.getID().toString()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.page.totalElements", is(1)))
            .andExpect(jsonPath("$._embedded.authorizations", contains(
                allOf(
                    hasJsonPath("$.id", is(admin.getID().toString() + "_" + alwaysTrue.getName() + "_"
                        + comRest.getUniqueType() + "_" + comRest.getId())),
                    hasJsonPath("$.type", is("authorization")),
                    hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
                    hasJsonPath("$._embedded.eperson.id", is(admin.getID().toString())),
                    hasJsonPath("$._embedded.object.id", is(com.getID().toString()))
                )
            )))
            // With the full projection, the admin sees the adminGroup embed (null here, as none exists)
            .andExpect(jsonPath("$._embedded.authorizations[0]._embedded.object._embedded.adminGroup",
                nullValue()));

        String epersonToken = getAuthToken(eperson.getEmail(), password);

        // verify that it works for normal epersons - with eperson parameter
        getClient(epersonToken).perform(get("/api/authz/authorizations/search/object")
                .param("uri", comUri)
                .param("projection", "full")
                .param("feature", alwaysTrue.getName())
                .param("eperson", eperson.getID().toString()))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.page.totalElements", is(1)))
            .andExpect(jsonPath("$._embedded.authorizations", contains(
                allOf(
                    hasJsonPath("$.id",
                        is(eperson.getID().toString() + "_" + alwaysTrue.getName() + "_"
                            + comRest.getUniqueType() + "_" + comRest.getId())),
                    hasJsonPath("$.type", is("authorization")),
                    hasJsonPath("$._embedded.feature.id", is(alwaysTrue.getName())),
                    hasJsonPath("$._embedded.eperson.id", is(eperson.getID().toString())),
                    hasJsonPath("$._embedded.object.id", is(com.getID().toString()))
                )
            )))
            // The admin-only full projection data is not visible in the eperson's full projection
            .andExpect(
                jsonPath("$._embedded.authorizations[0]._embedded.object._embedded.adminGroup")
                    .doesNotExist());
    }

    // utility methods to build authorization ID without having an authorization object
    private String getAuthorizationID(EPerson eperson, AuthorizationFeature feature, BaseObjectRest obj) {
        return getAuthorizationID(eperson != null ? eperson.getID().toString() : null, feature.getName(),
@@ -1663,4 +1722,6 @@ public class AuthorizationRestRepositoryIT extends AbstractControllerIntegration
        return (epersonUuid != null ? epersonUuid + "_" : "") + featureName + "_" + type + "_"
            + id.toString();
    }


}
@@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
@@ -958,4 +959,168 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
            )));
    }

    @Test
    public void findBrowseByTitleItemsFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and two collections.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();

        //2. One public item that is readable by Anonymous
        Item publicItem1 = ItemBuilder.createItem(context, col1)
            .withTitle("Public item 1")
            .withIssueDate("2017-10-17")
            .withAuthor("Smith, Donald").withAuthor("Doe, John")
            .withSubject("Java").withSubject("Unit Testing")
            .build();


        context.restoreAuthSystemState();

        getClient().perform(get("/api/discover/browses/title/items")
                .param("projection", "full"))

            //** THEN **
            //The status has to be 200 OK
            .andExpect(status().isOk())
            //We expect the content type to be "application/hal+json;charset=UTF-8"
            .andExpect(content().contentType(contentType))
            // The full projection for anon shouldn't show the adminGroup in the response
            .andExpect(
                jsonPath("$._embedded.items[0]._embedded.owningCollection._embedded.adminGroup").doesNotExist());


        String adminToken = getAuthToken(admin.getEmail(), password);
        getClient(adminToken).perform(get("/api/discover/browses/title/items")
                .param("projection", "full"))

            //** THEN **
            //The status has to be 200 OK
            .andExpect(status().isOk())
            //We expect the content type to be "application/hal+json;charset=UTF-8"
            .andExpect(content().contentType(contentType))
            // The full projection for admin should show the adminGroup in the response
            .andExpect(jsonPath("$._embedded.items[0]._embedded.owningCollection._embedded.adminGroup",
                nullValue()));
    }

    @Test
    public void browseByAuthorFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community and one collection.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build();

        //2. Twenty-one public items that are readable by Anonymous
        for (int i = 0; i <= 20; i++) {
            ItemBuilder.createItem(context, col1)
                .withTitle("Public item " + String.format("%02d", i))
                .withIssueDate("2017-10-17")
                .withAuthor("Test, Author" + String.format("%02d", i))
                .withSubject("Java").withSubject("Unit Testing")
                .build();
        }

        context.restoreAuthSystemState();


        getClient().perform(get("/api/discover/browses/author/entries")
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$.page.size", is(20)))
            .andExpect(jsonPath("$.page.totalElements", is(21)))
            .andExpect(jsonPath("$.page.totalPages", is(2)))
            .andExpect(jsonPath("$.page.number", is(0)))
            .andExpect(
                jsonPath("$._links.next.href", Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(
                jsonPath("$._links.last.href", Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(
                jsonPath("$._links.self.href", Matchers.endsWith("/api/discover/browses/author/entries")));

        String adminToken = getAuthToken(admin.getEmail(), password);
        getClient(adminToken).perform(get("/api/discover/browses/author/entries")
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$.page.size", is(20)))
            .andExpect(jsonPath("$.page.totalElements", is(21)))
            .andExpect(jsonPath("$.page.totalPages", is(2)))
            .andExpect(jsonPath("$.page.number", is(0)))
            .andExpect(jsonPath("$._links.next.href",
                Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(jsonPath("$._links.last.href",
                Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(
                jsonPath("$._links.self.href",
                    Matchers.endsWith("/api/discover/browses/author/entries")));

        getClient().perform(get("/api/discover/browses/author/entries"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$.page.size", is(20)))
            .andExpect(jsonPath("$.page.totalElements", is(21)))
            .andExpect(jsonPath("$.page.totalPages", is(2)))
            .andExpect(jsonPath("$.page.number", is(0)))
            .andExpect(
                jsonPath("$._links.next.href", Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(
                jsonPath("$._links.last.href", Matchers.containsString("/api/discover/browses/author/entries")))
            .andExpect(
                jsonPath("$._links.self.href", Matchers.endsWith("/api/discover/browses/author/entries")));

    }

    @Test
    public void testBrowseByDateIssuedItemsFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and two collections.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();

        Item item1 = ItemBuilder.createItem(context, col1)
            .withTitle("Item 1")
            .withIssueDate("2017-10-17")
            .build();

        context.restoreAuthSystemState();

        getClient().perform(get("/api/discover/browses/dateissued/items")
                .param("projection", "full"))

            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(
                jsonPath("$._embedded.items[0]._embedded.owningCollection._embedded.adminGroup").doesNotExist());

        String adminToken = getAuthToken(admin.getEmail(), password);
        getClient(adminToken).perform(get("/api/discover/browses/dateissued/items")
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$._embedded.items[0]._embedded.owningCollection._embedded.adminGroup",
                nullValue()));
    }


}
@@ -305,6 +305,44 @@ public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTes
            col1.getName(), col1.getID(), col1.getHandle())));
    }

    @Test
    public void findOneCollectionFullProjectionTest() throws Exception {

        //We turn off the authorization system in order to create the structure as defined below
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with two sub-communities and one collection.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community")
            .build();
        Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community Two")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        context.restoreAuthSystemState();


        String adminToken = getAuthToken(admin.getEmail(), password);
        getClient(adminToken).perform(get("/api/core/collections/" + col1.getID())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", CollectionMatcher.matchCollectionEntryFullProjection(
                col1.getName(), col1.getID(), col1.getHandle())));

        getClient().perform(get("/api/core/collections/" + col1.getID())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.not(CollectionMatcher.matchCollectionEntryFullProjection(
                col1.getName(), col1.getID(), col1.getHandle()))));
    }

    @Test
    public void findOneCollectionUnAuthenticatedTest() throws Exception {

@@ -121,10 +121,10 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        getClient(authToken).perform(post("/api/core/communities")
                .content(mapper.writeValueAsBytes(comm))
                .contentType(contentType)
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isCreated())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", CommunityMatcher.matchFullEmbeds()))
            .andExpect(jsonPath("$", CommunityMatcher.matchNonAdminEmbeds()))
            .andExpect(jsonPath("$", Matchers.allOf(
                hasJsonPath("$.id", not(empty())),
                hasJsonPath("$.uuid", not(empty())),
@@ -326,15 +326,15 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle()),
                CommunityMatcher
                    .matchCommunityEntryFullProjection(child1.getName(), child1.getID(), child1.getHandle())
                    .matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(), child1.getHandle())
            )))
            .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
            .andExpect(jsonPath("$.page.size", is(20)))
@@ -360,13 +360,13 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities").param("size", "2")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryMultipleTitles(titles, parentCommunity.getID(),
                    parentCommunity.getHandle()),
                CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                    child1.getHandle())
            )))
            .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
@@ -392,13 +392,13 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities").param("size", "2")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryMultipleTitles(titles, parentCommunity.getID(),
                    parentCommunity.getHandle()),
                CommunityMatcher.matchCommunityEntryFullProjection(childCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(childCommunity.getName(),
                    childCommunity.getID(),
                    childCommunity.getHandle())
            )))
@@ -408,14 +408,14 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
                2, 4)));

        getClient().perform(get("/api/core/communities").param("size", "2").param("page", "1")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryFullProjection(secondParentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(secondParentCommunity.getName(),
                    secondParentCommunity.getID(),
                    secondParentCommunity.getHandle()),
                CommunityMatcher.matchCommunityEntryFullProjection(thirdParentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(thirdParentCommunity.getName(),
                    thirdParentCommunity.getID(),
                    thirdParentCommunity.getHandle())
            )))
@@ -433,11 +433,11 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.contains(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
@@ -499,17 +499,17 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest

        getClient().perform(get("/api/core/communities")
                .param("size", "1")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.contains(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$._embedded.communities", Matchers.not(
                Matchers.contains(
                    CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                    CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                        child1.getHandle())
                )
            )))
@@ -519,16 +519,16 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        getClient().perform(get("/api/core/communities")
                .param("size", "1")
                .param("page", "1")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.contains(
                CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                    child1.getHandle())
            )))
            .andExpect(jsonPath("$._embedded.communities", Matchers.not(
                Matchers.contains(
                    CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                    CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                        parentCommunity.getID(),
                        parentCommunity.getHandle())
                )
@@ -662,10 +662,10 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest

        // When full projection is requested, response should include expected properties, links, and embeds.
        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", CommunityMatcher.matchFullEmbeds()))
            .andExpect(jsonPath("$", CommunityMatcher.matchNonAdminEmbeds()))
            .andExpect(jsonPath("$", CommunityMatcher.matchCommunityEntry(
                parentCommunity.getName(), parentCommunity.getID(), parentCommunity.getHandle())));

@@ -679,6 +679,39 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
                parentCommunity.getName(), parentCommunity.getID(), parentCommunity.getHandle())));
    }

    @Test
    public void findOneFullProjectionTest() throws Exception {
        //We turn off the authorization system in order to create the structure as defined below
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and one collection.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        context.restoreAuthSystemState();

        String adminToken = getAuthToken(admin.getEmail(), password);
        getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", CommunityMatcher.matchCommunityEntryFullProjection(
                parentCommunity.getName(), parentCommunity.getID(), parentCommunity.getHandle())));

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.not(CommunityMatcher.matchCommunityEntryFullProjection(
                parentCommunity.getName(), parentCommunity.getID(), parentCommunity.getHandle()))));
    }

    @Test
    public void findOneUnAuthenticatedTest() throws Exception {
        context.turnOffAuthorisationSystem();
@@ -778,17 +811,17 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$", Matchers.not(
                Matchers.is(
                    CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                    CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                        child1.getHandle())
                )
            )))
@@ -860,21 +893,21 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities/search/top")
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$._embedded.communities", Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle()),
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity2.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity2.getName(),
                    parentCommunity2.getID(),
                    parentCommunity2.getHandle())
            )))
            .andExpect(jsonPath("$._embedded.communities", Matchers.not(Matchers.containsInAnyOrder(
                CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                    child1.getHandle()),
                CommunityMatcher.matchCommunityEntryFullProjection(child12.getName(), child12.getID(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child12.getName(), child12.getID(),
                    child12.getHandle())
            ))))
            .andExpect(
@@ -1337,17 +1370,17 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$", Matchers.not(
                Matchers.is(
                    CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                    CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                        child1.getHandle())
                )
            )))
@@ -1374,13 +1407,13 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        ;

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection("Electronic theses and dissertations",
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds("Electronic theses and dissertations",
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
        ;
@@ -1429,11 +1462,11 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
@@ -1492,11 +1525,11 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
@@ -1526,11 +1559,11 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
@@ -1563,17 +1596,17 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection(parentCommunity.getName(),
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds(parentCommunity.getName(),
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$", Matchers.not(
                Matchers.is(
                    CommunityMatcher.matchCommunityEntryFullProjection(child1.getName(), child1.getID(),
                    CommunityMatcher.matchCommunityEntryNonAdminEmbeds(child1.getName(), child1.getID(),
                        child1.getHandle())
                )
            )))
@@ -1603,13 +1636,13 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
        ;

        getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString())
                .param("embed", CommunityMatcher.getFullEmbedsParameters()))
                .param("embed", CommunityMatcher.getNonAdminEmbeds()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(contentType))
            .andExpect(jsonPath("$", Matchers.is(
                CommunityMatcher.matchCommunityEntryFullProjection("Electronic theses and dissertations",
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
                CommunityMatcher.matchCommunityEntryNonAdminEmbeds("Electronic theses and dissertations",
                    parentCommunity.getID(),
                    parentCommunity.getHandle())
            )))
            .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
        ;
@@ -13,6 +13,7 @@ import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
import static org.dspace.core.Constants.WRITE;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
@@ -288,6 +289,47 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
            .andExpect(jsonPath("$", publicItem1Matcher));
    }

    @Test
    public void findOneFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and two collections.
        parentCommunity = CommunityBuilder.createCommunity(context)
            .withName("Parent Community")
            .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
            .withName("Sub Community")
            .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();

        //2. One public item that is readable by Anonymous
        Item publicItem1 = ItemBuilder.createItem(context, col1)
            .withTitle("Public item 1")
            .withIssueDate("2017-10-17")
            .withAuthor("Smith, Donald").withAuthor("Doe, John")
            .withSubject("ExtraEntry")
            .build();
        context.restoreAuthSystemState();
        Matcher<? super Object> publicItem1Matcher = ItemMatcher.matchItemWithTitleAndDateIssued(publicItem1,
            "Public item 1",
            "2017-10-17");

        String token = getAuthToken(admin.getEmail(), password);
        getClient(token).perform(get("/api/core/items/" + publicItem1.getID())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$._embedded.owningCollection._embedded.adminGroup", nullValue()));


        getClient().perform(get("/api/core/items/" + publicItem1.getID())
                .param("projection", "full"))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$._embedded.owningCollection._embedded.adminGroup").doesNotExist());

    }

    @Test
    public void findOneRelsTest() throws Exception {
        context.turnOffAuthorisationSystem();
@@ -8,6 +8,7 @@
|
||||
package org.dspace.app.rest;
|
||||
|
||||
import static com.jayway.jsonpath.JsonPath.read;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
@@ -54,6 +55,9 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

    private MetadataSchema metadataSchema;

    public static final String METADATAFIELDS_ENDPOINT = "/api/core/metadatafields/";
    private static final String SEARCH_BYFIELDNAME_ENDPOINT = METADATAFIELDS_ENDPOINT + "search/byFieldName";

    @Autowired
    private MetadataSchemaService metadataSchemaService;

@@ -74,13 +78,13 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/metadatafields")
                                .param("size", String.valueOf(100)))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItems(
                       MetadataFieldMatcher.matchMetadataFieldByKeys("dc", "title", null),
                       MetadataFieldMatcher.matchMetadataFieldByKeys("dc", "date", "issued"))
                   ))
                   .andExpect(jsonPath("$._links.first.href", Matchers.containsString("/api/core/metadatafields")))
                   .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/metadatafields")))
                   .andExpect(jsonPath("$._links.next.href", Matchers.containsString("/api/core/metadatafields")))
@@ -102,7 +106,7 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$", Matchers.is(
                       MetadataFieldMatcher.matchMetadataField(metadataField)
                   )));
    }

    @Test
@@ -122,30 +126,30 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        context.turnOffAuthorisationSystem();
        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement", "AQualifier", "AScopeNote").build();
        context.restoreAuthSystemState();

        getClient().perform(get("/api/core/metadatafields/search/bySchema")
                                .param("schema", "dc")
                                .param("size", String.valueOf(100)))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItems(
                       MetadataFieldMatcher.matchMetadataFieldByKeys("dc", "title", null),
                       MetadataFieldMatcher.matchMetadataFieldByKeys("dc", "date", "issued"))
                   ))
                   .andExpect(jsonPath("$.page.size", is(100)));

        getClient().perform(get("/api/core/metadatafields/search/bySchema")
                                .param("schema", schema.getName()))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));
    }
@@ -154,7 +158,7 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
    public void findByUndefinedSchema() throws Exception {

        getClient().perform(get("/api/core/metadatafields/search/bySchema")
                                .param("schema", "undefined"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$.page.size", is(20)))
@@ -168,6 +172,394 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
                   .andExpect(status().isBadRequest());
    }

    @Test
    public void findByFieldName_schema() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement", "AQualifier", "AScopeNote").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", schema.getName()))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));
    }

    @Test
    public void findByFieldName_element() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema2",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement", "AQualifier", "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement", "AQualifier2", "AScopeNote2").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("element", "AnElement"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(2)));
    }

    @Test
    public void findByFieldName_elementAndQualifier() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema2",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", "AQualifier", "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", "AQualifier", "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement2", "AQualifier", "AScopeNote2").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("element", "AnElement2")
                                .param("qualifier", "AQualifier"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   )))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(2)));
    }

    @Test
    public void findByFieldName_schemaAndQualifier() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema2",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", "AQualifier", "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", "AQualifier", "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement3", "AQualifier", "AScopeNote3").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", schema.getName())
                                .param("qualifier", "AQualifier"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   )))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(2)));
    }

    @Test
    public void findByFieldName_schemaElementAndQualifier() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema2",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", "AQualifier", "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", "AQualifier", "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement3", "AQualifier", "AScopeNote3").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", schema.getName())
                                .param("element", metadataField3.getElement())
                                .param("qualifier", metadataField3.getQualifier()))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));
    }

    @Test
    public void findByFieldName_query() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema2",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", "AQualifier", "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", "AQualifier", "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement3", "AQualifier", "AScopeNote2").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("query", schema.getName()))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(3)));

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("query", schema.getName() + ".AnElement3"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   )))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("query", "AnElement3.AQual"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   )))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));
    }

    @Test
    public void findByFieldName_query_noQualifier() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "test",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", null, "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "test", null, "AScopeNote2").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("query", "test"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   ))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(2)));
    }

    @Test
    public void findByFieldName_invalidQuery() throws Exception {
        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("query", "schema.element.qualifier.morestuff"))
                   .andExpect(status().isBadRequest());
    }
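
    // A "query" is matched against the dot-separated field name, so a bare schema,
    // a "schema.element" pair, or a partial "element.qualifier" fragment (see the
    // "AnElement3.AQual" case above) can all produce hits, while anything with more
    // than three dot-separated parts is rejected with 400 Bad Request. A rough sketch
    // of such a guard, assuming a plain split on '.' (the server-side implementation
    // may differ):
    //
    //     String[] parts = query.split("\\.");
    //     if (parts.length > 3) {
    //         throw new IllegalArgumentException(
    //             "Query may contain at most schema.element.qualifier");
    //     }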

    @Test
    public void findByFieldName_exactName() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataSchema schema2 = MetadataSchemaBuilder.createMetadataSchema(context, "test",
                                                                            "http://www.dspace.org/ns/aschema2").build();

        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();

        MetadataField metadataField2 = MetadataFieldBuilder
            .createMetadataField(context, schema2, "AnElement2", null, "AScopeNote2").build();

        MetadataField metadataField3 = MetadataFieldBuilder
            .createMetadataField(context, schema, "test", null, "AScopeNote2").build();

        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", metadataField.toString('.')))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField3))
                   )))
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.not(hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField2))
                   )))
                   .andExpect(jsonPath("$.page.size", is(20)))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));
    }

    @Test
    public void findByFieldName_exactName_NoResult() throws Exception {
        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", "not.valid.mdstring"))
                   .andExpect(status().isOk())
                   .andExpect(content().contentType(contentType))
                   .andExpect(jsonPath("$.page.totalElements", is(0)));
    }
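
    // "exactName" matches the full field name as rendered by MetadataField.toString('.'),
    // i.e. "schema.element" or "schema.element.qualifier". Illustrative only, given the
    // builders above:
    //
    //     metadataField.toString('.');   // -> "ASchema.AnElement1" (no qualifier set)
    //
    // so a lookup for "not.valid.mdstring" is a well-formed request that simply returns
    // an empty page rather than an error.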

    @Test
    public void findByFieldName_exactName_combinedDiscoveryQueryParams_query() throws Exception {
        context.turnOffAuthorisationSystem();
        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();
        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", metadataField.toString('.'))
                                .param("query", "query"))
                   .andExpect(status().isUnprocessableEntity());
    }

    @Test
    public void findByFieldName_exactName_combinedDiscoveryQueryParams_schema() throws Exception {
        context.turnOffAuthorisationSystem();
        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();
        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", metadataField.toString('.'))
                                .param("schema", "schema"))
                   .andExpect(status().isUnprocessableEntity());
    }

    @Test
    public void findByFieldName_exactName_combinedDiscoveryQueryParams_element() throws Exception {
        context.turnOffAuthorisationSystem();
        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();
        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", metadataField.toString('.'))
                                .param("element", "element"))
                   .andExpect(status().isUnprocessableEntity());
    }

    @Test
    public void findByFieldName_exactName_combinedDiscoveryQueryParams_qualifier() throws Exception {
        context.turnOffAuthorisationSystem();
        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();
        MetadataField metadataField = MetadataFieldBuilder
            .createMetadataField(context, schema, "AnElement1", null, "AScopeNote").build();
        context.restoreAuthSystemState();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("exactName", metadataField.toString('.'))
                                .param("qualifier", "qualifier"))
                   .andExpect(status().isUnprocessableEntity());
    }
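
    // The four tests above pin down that "exactName" is mutually exclusive with the
    // discovery-style parameters: combining it with any of query, schema, element or
    // qualifier yields 422 Unprocessable Entity instead of silently ignoring one of
    // the two lookup modes. For example ("dc.title" used purely as an illustration):
    //
    //     getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
    //             .param("exactName", "dc.title")
    //             .param("schema", "dc"))
    //         .andExpect(status().isUnprocessableEntity());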

    @Test
    public void createSuccess() throws Exception {

@@ -183,10 +575,10 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient(authToken)
            .perform(post("/api/core/metadatafields")
                         .param("schemaId", metadataSchema.getID() + "")
                         .param("projection", "full")
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isCreated())
            .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.id")));

@@ -199,6 +591,49 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
        }
    }

    @Test
    public void create_checkAddedToIndex() throws Exception {

        MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
        metadataFieldRest.setElement("testElementForCreate");
        metadataFieldRest.setQualifier("testQualifierForCreate");
        metadataFieldRest.setScopeNote(SCOPE_NOTE);

        String authToken = getAuthToken(admin.getEmail(), password);
        AtomicReference<Integer> idRef = new AtomicReference<>();
        try {
            assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue());

            getClient(authToken)
                .perform(post("/api/core/metadatafields")
                             .param("schemaId", metadataSchema.getID() + "")
                             .param("projection", "full")
                             .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                             .contentType(contentType))
                .andExpect(status().isCreated())
                .andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.id")));

            getClient(authToken).perform(get("/api/core/metadatafields/" + idRef.get()))
                                .andExpect(status().isOk())
                                .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
                                    metadataSchema.getName(), "testElementForCreate", "testQualifierForCreate")));

            // new metadata field found in index
            getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                    .param("schema", metadataSchema.getName())
                                    .param("element", metadataFieldRest.getElement())
                                    .param("qualifier", metadataFieldRest.getQualifier()))
                       .andExpect(status().isOk())
                       .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                           MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
                               metadataFieldRest.getElement(), metadataFieldRest.getQualifier()))
                       ))
                       .andExpect(jsonPath("$.page.totalElements", is(1)));
        } finally {
            MetadataFieldBuilder.deleteMetadataField(idRef.get());
        }
    }
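
    // Unlike fields created through MetadataFieldBuilder, a field created via the REST
    // endpoint above is not tracked by the builder cleanup machinery, which is why the
    // test wraps the calls in try/finally and deletes the field by the id captured from
    // the POST response:
    //
    //     AtomicReference<Integer> idRef = new AtomicReference<>();
    //     ...
    //     } finally {
    //         MetadataFieldBuilder.deleteMetadataField(idRef.get());
    //     }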

    @Test
    public void createUnauthorized() throws Exception {

@@ -209,9 +644,9 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient()
            .perform(post("/api/core/metadatafields")
                         .param("schemaId", metadataSchema.getID() + "")
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isUnauthorized());
    }

@@ -227,9 +662,9 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient(token)
            .perform(post("/api/core/metadatafields")
                         .param("schemaId", metadataSchema.getID() + "")
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isForbidden());
    }

@@ -315,6 +750,44 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
                   .andExpect(status().isNotFound());
    }

    @Test
    public void delete_checkDeletedFromIndex() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
                                                                           "http://www.dspace.org/ns/aschema").build();

        MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, schema, ELEMENT, QUALIFIER,
                                                                               SCOPE_NOTE).build();

        context.restoreAuthSystemState();

        Integer id = metadataField.getID();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", schema.getName())
                                .param("element", metadataField.getElement())
                                .param("qualifier", metadataField.getQualifier()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataField(metadataField))
                   ));

        getClient(getAuthToken(admin.getEmail(), password))
            .perform(delete("/api/core/metadatafields/" + id))
            .andExpect(status().isNoContent());

        assertThat(metadataFieldService.find(context, id), nullValue());

        // deleted metadata field not found in index
        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", schema.getName())
                                .param("element", metadataField.getElement())
                                .param("qualifier", metadataField.getQualifier()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$.page.totalElements", is(0)));
    }

    @Test
    public void update() throws Exception {
        context.turnOffAuthorisationSystem();
@@ -332,15 +805,68 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient(getAuthToken(admin.getEmail(), password))
            .perform(put("/api/core/metadatafields/" + metadataField.getID())
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isOk());

        getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
                       metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED)
                   ));
    }

    @Test
    public void update_checkUpdatedInIndex() throws Exception {
        context.turnOffAuthorisationSystem();

        MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
                                                          .build();

        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", metadataSchema.getName())
                                .param("element", metadataField.getElement())
                                .param("qualifier", metadataField.getQualifier()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
                           metadataField.getElement(), metadataField.getQualifier()))
                   ))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));

        context.restoreAuthSystemState();

        MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
        metadataFieldRest.setId(metadataField.getID());
        metadataFieldRest.setElement(ELEMENT_UPDATED);
        metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
        metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);

        getClient(getAuthToken(admin.getEmail(), password))
            .perform(put("/api/core/metadatafields/" + metadataField.getID())
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isOk());

        // new metadata field found in index
        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", metadataSchema.getName())
                                .param("element", ELEMENT_UPDATED)
                                .param("qualifier", QUALIFIER_UPDATED))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
                       MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
                           ELEMENT_UPDATED, QUALIFIER_UPDATED))
                   ))
                   .andExpect(jsonPath("$.page.totalElements", is(1)));

        // original metadata field not found in index
        getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
                                .param("schema", metadataSchema.getName())
                                .param("element", metadataField.getElement())
                                .param("qualifier", metadataField.getQualifier()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$.page.totalElements", is(0)));
    }

    @Test
@@ -360,15 +886,15 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient()
            .perform(put("/api/core/metadatafields/" + metadataField.getID())
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isUnauthorized());

        getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
                       metadataSchema.getName(), ELEMENT, QUALIFIER)
                   ));
    }

@@ -390,15 +916,15 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration

        getClient(getAuthToken(eperson.getEmail(), password))
            .perform(put("/api/core/metadatafields/" + metadataField.getID())
                         .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
                         .contentType(contentType))
            .andExpect(status().isForbidden());

        getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
                   .andExpect(status().isOk())
                   .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
                       metadataSchema.getName(), ELEMENT, QUALIFIER)
                   ));
    }

File diff suppressed because it is too large
@@ -0,0 +1,167 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.rest;

import static org.dspace.builder.ItemBuilder.createItem;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import javax.servlet.ServletException;

import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.services.ConfigurationService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.MvcResult;

/**
 * Integration test to test the /sitemaps/{name} endpoint, see {@link SitemapRestController}
 *
 * @author Maria Verdonck (Atmire) on 08/07/2020
 */
public class SitemapRestControllerIT extends AbstractControllerIntegrationTest {

    @Autowired
    ConfigurationService configurationService;

    private final static String SITEMAPS_ENDPOINT = "sitemaps";

    private Item item1;
    private Item item2;

    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();

        configurationService.setProperty("sitemap.path", SITEMAPS_ENDPOINT);

        context.turnOffAuthorisationSystem();

        Community community = CommunityBuilder.createCommunity(context).build();
        Collection collection = CollectionBuilder.createCollection(context, community).build();
        this.item1 = createItem(context, collection)
            .withTitle("Test 1")
            .withIssueDate("2010-10-17")
            .build();
        this.item2 = createItem(context, collection)
            .withTitle("Test 2")
            .withIssueDate("2015-8-3")
            .build();

        runDSpaceScript("generate-sitemaps");

        context.restoreAuthSystemState();
    }

    @After
    public void destroy() throws Exception {
        // delete sitemaps generated by tests in before
        runDSpaceScript("generate-sitemaps", "-d");

        super.destroy();
    }

    @Test
    public void testSitemap_notValidSiteMapFile() throws Exception {
        //** WHEN **
        //We attempt to retrieve a non valid sitemap file
        getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/no-such-file"))
                   //** THEN **
                   .andExpect(status().isNotFound());
    }

    @Test(expected = ServletException.class)
    public void testSitemap_fileSystemTraversal_dspaceCfg() throws Exception {
        //** WHEN **
        //We attempt to use endpoint for malicious file system traversal
        getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/%2e%2e/config/dspace.cfg"));
    }

    @Test(expected = ServletException.class)
    public void testSitemap_fileSystemTraversal_dspaceCfg2() throws Exception {
        //** WHEN **
        //We attempt to use endpoint for malicious file system traversal
        getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/%2e%2e%2fconfig%2fdspace.cfg"));
    }
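
    // Both traversal tests expect a ServletException before any file is served. A
    // typical guard (sketch only, under the assumption that the controller resolves
    // names against a configured sitemap folder; the real check may differ):
    //
    //     File found = new File(sitemapFolder, name);
    //     if (!found.getCanonicalPath()
    //               .startsWith(sitemapFolder.getCanonicalPath() + File.separator)) {
    //         throw new ServletException("Invalid sitemap file name: " + name);
    //     }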

    @Test
    public void testSitemap_sitemapIndexHtml() throws Exception {
        //** WHEN **
        //We retrieve sitemap_index.html
        MvcResult result = getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/sitemap_index.html"))
                                      //** THEN **
                                      .andExpect(status().isOk())
                                      //We expect the content type to match
                                      .andExpect(content().contentType("text/html"))
                                      .andReturn();

        String response = result.getResponse().getContentAsString();
        // contains a link to /sitemaps/sitemap0.html
        assertTrue(response.contains("/sitemap0.html"));
    }

    @Test
    public void testSitemap_sitemap0Html() throws Exception {
        //** WHEN **
        //We retrieve sitemap0.html
        MvcResult result = getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/sitemap0.html"))
                                      //** THEN **
                                      .andExpect(status().isOk())
                                      //We expect the content type to match
                                      .andExpect(content().contentType("text/html"))
                                      .andReturn();

        String response = result.getResponse().getContentAsString();
        // contains a link to items: [dspace.ui.url]/items/<uuid>
        assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item1.getID()));
        assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item2.getID()));
    }

    @Test
    public void testSitemap_sitemapIndexXml() throws Exception {
        //** WHEN **
        //We retrieve sitemap_index.xml
        MvcResult result = getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/sitemap_index.xml"))
                                      //** THEN **
                                      .andExpect(status().isOk())
                                      //We expect the content type to match
                                      .andExpect(content().contentType("application/xml"))
                                      .andReturn();

        String response = result.getResponse().getContentAsString();
        // contains a link to /sitemaps/sitemap0.xml
        assertTrue(response.contains("/sitemap0.xml"));
    }

    @Test
    public void testSitemap_sitemap0Xml() throws Exception {
        //** WHEN **
        //We retrieve sitemap0.xml
        MvcResult result = getClient().perform(get("/" + SITEMAPS_ENDPOINT + "/sitemap0.xml"))
                                      //** THEN **
                                      .andExpect(status().isOk())
                                      //We expect the content type to match
                                      .andExpect(content().contentType("application/xml"))
                                      .andReturn();

        String response = result.getResponse().getContentAsString();
        // contains a link to items: [dspace.ui.url]/items/<uuid>
        assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item1.getID()));
        assertTrue(response.contains(configurationService.getProperty("dspace.ui.url") + "/items/" + item2.getID()));
    }
}
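
// The sitemaps asserted above are produced by the "generate-sitemaps" launcher script
// invoked from setUp(); the "-d" flag used in destroy() deletes them again. Outside the
// test harness the equivalent commands would be (path depending on the installation):
//
//     [dspace]/bin/dspace generate-sitemaps
//     [dspace]/bin/dspace generate-sitemaps -d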

@@ -349,7 +349,7 @@ public class WorkflowDefinitionRestRepositoryIT extends AbstractControllerIntegr

        if (StringUtils.isNotBlank(firstNonDefaultWorkflowName)) {
            List<Collection> mappedCollections
                = xmlWorkflowFactory.getCollectionHandlesMappedToWorkflow(context, firstNonDefaultWorkflowName);
            //When we call this facets endpoint
            if (mappedCollections.size() > 0) {
                //returns array of collection jsons that are mapped to given workflow
@@ -10,6 +10,7 @@ package org.dspace.app.rest;
import static com.jayway.jsonpath.JsonPath.read;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
@@ -1817,4 +1818,44 @@ public class WorkflowItemRestRepositoryIT extends AbstractControllerIntegrationT
                                    is("http://localhost/api/workflow/pooltask/search"))
                   )));
    }

    @Test
    public void findOneFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and two collections.
        parentCommunity = CommunityBuilder.createCommunity(context)
                                          .withName("Parent Community")
                                          .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                                           .withName("Sub Community")
                                           .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1")
                                           .withWorkflowGroup(1, admin).build();

        //2. a workflow item
        XmlWorkflowItem witem = WorkflowItemBuilder.createWorkflowItem(context, col1)
                                                   .withTitle("Workflow Item 1")
                                                   .withIssueDate("2017-10-17")
                                                   .withAuthor("Smith, Donald").withAuthor("Doe, John")
                                                   .withSubject("ExtraEntry")
                                                   .build();

        context.restoreAuthSystemState();

        String adminToken = getAuthToken(admin.getEmail(), password);
        String epersonToken = getAuthToken(eperson.getEmail(), password);

        getClient(epersonToken).perform(get("/api/workflow/workflowitems/" + witem.getID())
                                            .param("projection", "full"))
                               .andExpect(status().isOk())
                               .andExpect(jsonPath("$._embedded.collection._embedded.adminGroup").doesNotExist());

        getClient(adminToken).perform(get("/api/workflow/workflowitems/" + witem.getID())
                                          .param("projection", "full"))
                             .andExpect(status().isOk())
                             .andExpect(jsonPath("$._embedded.collection._embedded.adminGroup", nullValue()));
    }
}
@@ -13,6 +13,7 @@ import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST_VALUE;
import static org.springframework.http.MediaType.parseMediaType;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
@@ -4572,4 +4573,44 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
                       Matchers.is(WorkspaceItemMatcher.matchItemWithTitleAndDateIssuedAndSubject(witem,
                           "Test title", "2019-04-25", "ExtraEntry"))));
    }

    @Test
    public void findOneFullProjectionTest() throws Exception {
        context.turnOffAuthorisationSystem();

        //** GIVEN **
        //1. A community-collection structure with one parent community with sub-community and two collections.
        parentCommunity = CommunityBuilder.createCommunity(context)
                                          .withName("Parent Community")
                                          .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                                           .withName("Sub Community")
                                           .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        //2. a workspace item
        WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1)
                                                  .withTitle("Workspace Item 1")
                                                  .withIssueDate("2017-10-17")
                                                  .withAuthor("Smith, Donald").withAuthor("Doe, John")
                                                  .withSubject("ExtraEntry")
                                                  .build();

        context.restoreAuthSystemState();

        String adminToken = getAuthToken(admin.getEmail(), password);
        String epersonToken = getAuthToken(eperson.getEmail(), password);

        getClient(adminToken).perform(get("/api/submission/workspaceitems/" + witem.getID())
                                          .param("projection", "full"))
                             .andExpect(status().isOk())
                             .andExpect(jsonPath("$._embedded.collection._embedded.adminGroup", nullValue()));

        getClient(epersonToken).perform(get("/api/submission/workspaceitems/" + witem.getID())
                                            .param("projection", "full"))
                               .andExpect(status().isOk())
                               .andExpect(jsonPath("$._embedded.collection._embedded.adminGroup").doesNotExist());
    }
}
@@ -11,22 +11,29 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.sql.SQLException;

import org.dspace.app.rest.authorization.impl.AdministratorOfFeature;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.converter.CommunityConverter;
import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.converter.SiteConverter;
import org.dspace.app.rest.matcher.AuthorizationMatcher;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.projection.DefaultProjection;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
@@ -55,6 +62,8 @@ public class AdministratorFeatureIT extends AbstractControllerIntegrationTest {
    @Autowired
    CommunityService communityService;
    @Autowired
    private ItemConverter itemConverter;
    @Autowired
    private CommunityConverter communityConverter;
    @Autowired
    private CollectionConverter collectionConverter;

@@ -63,6 +72,22 @@ public class AdministratorFeatureIT extends AbstractControllerIntegrationTest {

    private SiteService siteService;

    private EPerson adminComA;
    private EPerson adminComB;
    private EPerson adminColA;
    private EPerson adminColB;
    private EPerson adminItemA;
    private EPerson adminItemB;

    private Community communityA;
    private Community subCommunityOfA;
    private Community communityB;
    private Collection collectionA;
    private Collection collectionB;
    private Item itemInCollectionA;
    private Item itemInCollectionB;

    /**
     * this holds a reference to the test feature {@link AdministratorOfFeature}
     */
@@ -74,201 +99,345 @@ public class AdministratorFeatureIT extends AbstractControllerIntegrationTest {
        super.setUp();
        siteService = ContentServiceFactory.getInstance().getSiteService();
        administratorFeature = authorizationFeatureService.find(AdministratorOfFeature.NAME);
        initAdminsAndObjects();
    }

    private void initAdminsAndObjects() throws SQLException, AuthorizeException {
        context.turnOffAuthorisationSystem();

        adminComA = EPersonBuilder.createEPerson(context)
                                  .withEmail("adminComA@example.com")
                                  .withPassword(password)
                                  .build();

        adminComB = EPersonBuilder.createEPerson(context)
                                  .withEmail("adminComB@example.com")
                                  .withPassword(password)
                                  .build();

        adminColA = EPersonBuilder.createEPerson(context)
                                  .withEmail("adminColA@example.com")
                                  .withPassword(password)
                                  .build();

        adminColB = EPersonBuilder.createEPerson(context)
                                  .withEmail("adminColB@example.com")
                                  .withPassword(password)
                                  .build();

        adminItemA = EPersonBuilder.createEPerson(context)
                                   .withEmail("adminItemA@example.com")
                                   .withPassword(password)
                                   .build();

        adminItemB = EPersonBuilder.createEPerson(context)
                                   .withEmail("adminItemB@example.com")
                                   .withPassword(password)
                                   .build();

        communityA = CommunityBuilder.createCommunity(context)
                                     .withName("Community A")
                                     .withAdminGroup(adminComA)
                                     .build();

        subCommunityOfA = CommunityBuilder.createSubCommunity(context, communityA)
                                          .withName("Sub Community of CommunityA")
                                          .build();

        communityB = CommunityBuilder.createCommunity(context)
                                     .withName("Community B")
                                     .withAdminGroup(adminComB)
                                     .build();

        collectionA = CollectionBuilder.createCollection(context, subCommunityOfA)
                                       .withName("Collection A")
                                       .withAdminGroup(adminColA)
                                       .build();

        collectionB = CollectionBuilder.createCollection(context, communityB)
                                       .withName("Collection B")
                                       .withAdminGroup(adminColB)
                                       .build();

        itemInCollectionA = ItemBuilder.createItem(context, collectionA)
                                       .withTitle("Item in Collection A")
                                       .withAdminUser(adminItemA)
                                       .build();

        itemInCollectionB = ItemBuilder.createItem(context, collectionB)
                                       .withTitle("Item in Collection B")
                                       .withAdminUser(adminItemB)
                                       .build();

        context.restoreAuthSystemState();
    }

@Test
|
||||
public void communityWithAdministratorFeatureTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson adminComA = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("adminComA@example.com")
|
||||
.withPassword(password)
|
||||
.build();
|
||||
|
||||
EPerson adminComB = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("adminComB@example.com")
|
||||
.withPassword(password)
|
||||
.build();
|
||||
|
||||
Community communityA = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community A")
|
||||
.withAdminGroup(adminComA)
|
||||
.build();
|
||||
|
||||
Community subCommunityOfA = CommunityBuilder.createSubCommunity(context, communityA)
|
||||
.withName("Sub Community of CommunityA")
|
||||
.build();
|
||||
|
||||
Collection collectionOfSubComm = CollectionBuilder.createCollection(context, subCommunityOfA)
|
||||
.withName("Collection of subCommunity")
|
||||
.build();
|
||||
|
||||
Community communityB = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community B")
|
||||
.withAdminGroup(adminComB)
|
||||
.build();
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
CommunityRest communityRestA = communityConverter.convert(communityA, DefaultProjection.DEFAULT);
|
||||
CommunityRest SubCommunityOfArest = communityConverter.convert(subCommunityOfA, DefaultProjection.DEFAULT);
|
||||
CollectionRest collectionRestOfSubComm = collectionConverter.convert(collectionOfSubComm,
|
||||
DefaultProjection.DEFAULT);
|
||||
CommunityRest communityRestB = communityConverter.convert(communityB, DefaultProjection.DEFAULT);
|
||||
CommunityRest SubCommunityOfARest = communityConverter.convert(subCommunityOfA, DefaultProjection.DEFAULT);
|
||||
|
||||
// tokens
|
||||
String tokenAdminComA = getAuthToken(adminComA.getEmail(), password);
|
||||
String tokenAdminComB = getAuthToken(adminComB.getEmail(), password);
|
||||
String tokenAdmin = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
// define authorizations that we know must exists
|
||||
Authorization authAdminCommunityA = new Authorization(adminComA, administratorFeature, communityRestA);
|
||||
Authorization authAdminSubCommunityOfA = new Authorization(adminComA, administratorFeature,SubCommunityOfArest);
|
||||
Authorization authAdminAColl = new Authorization(adminComA, administratorFeature, collectionRestOfSubComm);
|
||||
Authorization authAdminSiteComA = new Authorization(admin, administratorFeature, communityRestA);
|
||||
Authorization authAdminComAComA = new Authorization(adminComA, administratorFeature, communityRestA);
|
||||
Authorization authAdminComASubComA = new Authorization(adminComA, administratorFeature, SubCommunityOfARest);
|
||||
        Authorization authAdminComBComB = new Authorization(adminComB, administratorFeature, communityRestB);

        // define authorizations that we know do not exist
        Authorization authAdminBColl = new Authorization(adminComB, administratorFeature, collectionRestOfSubComm);
        Authorization authAdminBCommunityA = new Authorization(adminComB, administratorFeature, communityRestA);
        Authorization authAdminComBComA = new Authorization(adminComB, administratorFeature, communityRestA);
        Authorization authAdminComBSubComA = new Authorization(adminComB, administratorFeature, SubCommunityOfARest);
        Authorization authAdminColAComA = new Authorization(adminColA, administratorFeature, communityRestA);
        Authorization authAdminItemAComA = new Authorization(adminItemA, administratorFeature, communityRestA);
        Authorization authEPersonComA = new Authorization(eperson, administratorFeature, communityRestA);
        Authorization authAnonymousComA = new Authorization(null, administratorFeature, communityRestA);

        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminCommunityA.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSiteComA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminCommunityA))));
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminSiteComA))));

        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminSubCommunityOfA.getID()))
        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminComAComA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher
                        .matchAuthorization(authAdminSubCommunityOfA))));
                        .matchAuthorization(authAdminComAComA))));

        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminAColl.getID()))
        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminComASubComA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminAColl))));
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminComASubComA))));

        getClient(tokenAdminComB).perform(get("/api/authz/authorizations/" + authAdminBCommunityA.getID()))
        getClient(tokenAdminComB).perform(get("/api/authz/authorizations/" + authAdminComBComB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminComBComB))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminComBComA.getID()))
                .andExpect(status().isNotFound());

        getClient(tokenAdminComB).perform(get("/api/authz/authorizations/" + authAdminBColl.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminComBSubComA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminColAComA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminItemAComA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authEPersonComA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAnonymousComA.getID()))
                .andExpect(status().isNotFound());
    }
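    /**
     * Verify that the "administrator" feature on a collection is granted to the site admin, to
     * the admins of the parent community and to the collection's own admin group, but not to
     * the admins of a sibling collection, to item admins, to plain EPersons or to anonymous users.
     */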
    @Test
    public void collectionWithAdministratorFeatureTest() throws Exception {
        context.turnOffAuthorisationSystem();

        EPerson adminColA = EPersonBuilder.createEPerson(context)
                .withEmail("adminColA@example.com")
                .withPassword(password)
                .build();

        EPerson adminColB = EPersonBuilder.createEPerson(context)
                .withEmail("adminColB@example.com")
                .withPassword(password)
                .build();

        Community parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();

        Collection collectionA = CollectionBuilder.createCollection(context, parentCommunity)
                .withName("Collection A")
                .withAdminGroup(adminColA)
                .build();

        Collection collectionB = CollectionBuilder.createCollection(context, parentCommunity)
                .withName("Collection B")
                .withAdminGroup(adminColB)
                .build();

        context.restoreAuthSystemState();

        CollectionRest collectionRestA = collectionConverter.convert(collectionA, DefaultProjection.DEFAULT);
        CollectionRest collectionRestB = collectionConverter.convert(collectionB, DefaultProjection.DEFAULT);

        String tokenAdminColA = getAuthToken(adminColA.getEmail(), password);
        String tokenAdminColB = getAuthToken(adminColB.getEmail(), password);
        String tokenAdminComA = getAuthToken(adminComA.getEmail(), password);
        String tokenAdminComB = getAuthToken(adminComB.getEmail(), password);
        String tokenAdmin = getAuthToken(admin.getEmail(), password);

        // define authorizations that we know must exist
        Authorization authAdminCollectionA = new Authorization(adminColA, administratorFeature, collectionRestA);
        Authorization authAdminCollectionB = new Authorization(adminColB, administratorFeature, collectionRestB);

        Authorization authAdminSiteColA = new Authorization(admin, administratorFeature, collectionRestA);
        Authorization authAdminComAColA = new Authorization(adminComA, administratorFeature, collectionRestA);
        Authorization authAdminColAColA = new Authorization(adminColA, administratorFeature, collectionRestA);

        Authorization authAdminSiteColB = new Authorization(admin, administratorFeature, collectionRestB);
        Authorization authAdminComBColB = new Authorization(adminComB, administratorFeature, collectionRestB);
        Authorization authAdminColBColB = new Authorization(adminColB, administratorFeature, collectionRestB);

        // define authorizations that we know do not exist
        Authorization authAdminBcollectionA = new Authorization(adminColB, administratorFeature, collectionRestA);
        Authorization authAdminColBColA = new Authorization(adminColB, administratorFeature, collectionRestA);
        Authorization authAdminComBColA = new Authorization(adminComB, administratorFeature, collectionRestA);
        Authorization authAdminItemAColA = new Authorization(adminItemA, administratorFeature, collectionRestA);
        Authorization authEPersonColA = new Authorization(eperson, administratorFeature, collectionRestA);
        Authorization authAnonymousColA = new Authorization(null, administratorFeature, collectionRestA);

        getClient(tokenAdminColA).perform(get("/api/authz/authorizations/" + authAdminCollectionA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminCollectionA))));

        getClient(tokenAdminColB).perform(get("/api/authz/authorizations/" + authAdminCollectionB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminCollectionB))));

        getClient(tokenAdminColB).perform(get("/api/authz/authorizations/" + authAdminBcollectionA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSiteColA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminSiteColA))));

        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminComAColA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminComAColA))));

        getClient(tokenAdminColA).perform(get("/api/authz/authorizations/" + authAdminColAColA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminColAColA))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSiteColB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminSiteColB))));

        getClient(tokenAdminComB).perform(get("/api/authz/authorizations/" + authAdminComBColB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminComBColB))));

        getClient(tokenAdminColB).perform(get("/api/authz/authorizations/" + authAdminColBColB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminColBColB))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminColBColA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminComBColA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminItemAColA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authEPersonColA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAnonymousColA.getID()))
                .andExpect(status().isNotFound());
    }
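    /**
     * Verify that the "administrator" feature on the site object (and, through inheritance, on
     * any community or collection) is granted to the site admin only, and never to community,
     * collection or item admins, plain EPersons or anonymous users.
     */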
    @Test
    public void siteWithAdministratorFeatureTest() throws Exception {
        context.turnOffAuthorisationSystem();

        Community parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Test Parent Community")
                .build();

        Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
                .withName("Test Collection")
                .build();

        context.restoreAuthSystemState();

        Site site = siteService.findSite(context);
        SiteRest siteRest = siteConverter.convert(site, DefaultProjection.DEFAULT);
        CommunityRest communityRest = communityConverter.convert(parentCommunity, DefaultProjection.DEFAULT);
        CollectionRest collectionRest = collectionConverter.convert(collection, DefaultProjection.DEFAULT);

        // tokens
        String tokenAdmin = getAuthToken(admin.getEmail(), password);
        String tokenEperson = getAuthToken(eperson.getEmail(), password);

        // define authorizations of Admin that we know must exist
        Authorization authAdminSite = new Authorization(admin, administratorFeature, siteRest);
        Authorization authAdminCommunity = new Authorization(admin, administratorFeature, communityRest);
        Authorization authAdminCollection = new Authorization(admin, administratorFeature, collectionRest);

        // define authorizations of EPerson that we know do not exist
        Authorization authAdminComASite = new Authorization(adminComA, administratorFeature, siteRest);
        Authorization authAdminColASite = new Authorization(adminColA, administratorFeature, siteRest);
        Authorization authAdminItemASite = new Authorization(adminItemA, administratorFeature, siteRest);
        Authorization authEPersonSite = new Authorization(eperson, administratorFeature, siteRest);
        Authorization authEpersonCommunity = new Authorization(eperson, administratorFeature, communityRest);
        Authorization authEpersonCollection = new Authorization(eperson, administratorFeature, collectionRest);

        // define authorizations of Anonymous that we know do not exist
        Authorization authAnonymousSite = new Authorization(null, administratorFeature, siteRest);
        Authorization authAnonymousCommunity = new Authorization(null, administratorFeature, communityRest);
        Authorization authAnonymousCollection = new Authorization(null, administratorFeature, collectionRest);

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSite.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminSite))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminCommunity.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminCommunity))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminCollection.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(AuthorizationMatcher.matchAuthorization(authAdminCollection))));

        getClient(tokenEperson).perform(get("/api/authz/authorizations/" + authEPersonSite.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authEPersonSite.getID()))
                .andExpect(status().isNotFound());

        getClient(tokenEperson).perform(get("/api/authz/authorizations/" + authEpersonCommunity.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminComASite.getID()))
                .andExpect(status().isNotFound());

        getClient(tokenEperson).perform(get("/api/authz/authorizations/" + authEpersonCollection.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminColASite.getID()))
                .andExpect(status().isNotFound());

        getClient().perform(get("/api/authz/authorizations/" + authAnonymousSite.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminItemASite.getID()))
                .andExpect(status().isNotFound());

        getClient().perform(get("/api/authz/authorizations/" + authAnonymousCommunity.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authEPersonSite.getID()))
                .andExpect(status().isNotFound());

        getClient().perform(get("/api/authz/authorizations/" + authAnonymousCollection.getID()))
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAnonymousSite.getID()))
                .andExpect(status().isNotFound());
    }
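    /**
     * Verify that the "administrator" feature on an item is granted to the site admin and to the
     * admins of the item itself, its owning collection and its parent community, but never across
     * the two test hierarchies, nor to plain EPersons or anonymous users.
     */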
    @Test
    public void itemWithAdministratorFeatureTest() throws Exception {

        ItemRest itemRestA = itemConverter.convert(itemInCollectionA, DefaultProjection.DEFAULT);
        ItemRest itemRestB = itemConverter.convert(itemInCollectionB, DefaultProjection.DEFAULT);

        String tokenAdminItemA = getAuthToken(adminItemA.getEmail(), password);
        String tokenAdminItemB = getAuthToken(adminItemB.getEmail(), password);
        String tokenAdminColA = getAuthToken(adminColA.getEmail(), password);
        String tokenAdminColB = getAuthToken(adminColB.getEmail(), password);
        String tokenAdminComA = getAuthToken(adminComA.getEmail(), password);
        String tokenAdminComB = getAuthToken(adminComB.getEmail(), password);
        String tokenAdmin = getAuthToken(admin.getEmail(), password);

        // define authorizations that we know must exist
        Authorization authAdminSiteItemA = new Authorization(admin, administratorFeature, itemRestA);
        Authorization authAdminComAItemA = new Authorization(adminComA, administratorFeature, itemRestA);
        Authorization authAdminColAItemA = new Authorization(adminColA, administratorFeature, itemRestA);
        Authorization authAdminItemAItemA = new Authorization(adminItemA, administratorFeature, itemRestA);

        Authorization authAdminSiteItemB = new Authorization(admin, administratorFeature, itemRestB);
        Authorization authAdminComBItemB = new Authorization(adminComB, administratorFeature, itemRestB);
        Authorization authAdminColBItemB = new Authorization(adminColB, administratorFeature, itemRestB);
        Authorization authAdminItemBItemB = new Authorization(adminItemB, administratorFeature, itemRestB);

        // define authorizations that we know do not exist
        Authorization authAdminComBItemA = new Authorization(adminComB, administratorFeature, itemRestA);
        Authorization authAdminColBItemA = new Authorization(adminColB, administratorFeature, itemRestA);
        Authorization authAdminItemBItemA = new Authorization(adminItemB, administratorFeature, itemRestA);
        Authorization authEPersonItemA = new Authorization(eperson, administratorFeature, itemRestA);
        Authorization authAnonymousItemA = new Authorization(null, administratorFeature, itemRestA);

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSiteItemA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminSiteItemA))));

        getClient(tokenAdminComA).perform(get("/api/authz/authorizations/" + authAdminComAItemA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminComAItemA))));

        getClient(tokenAdminColA).perform(get("/api/authz/authorizations/" + authAdminColAItemA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminColAItemA))));

        getClient(tokenAdminItemA).perform(get("/api/authz/authorizations/" + authAdminItemAItemA.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminItemAItemA))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminSiteItemB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminSiteItemB))));

        getClient(tokenAdminComB).perform(get("/api/authz/authorizations/" + authAdminComBItemB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminComBItemB))));

        getClient(tokenAdminColB).perform(get("/api/authz/authorizations/" + authAdminColBItemB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminColBItemB))));

        getClient(tokenAdminItemB).perform(get("/api/authz/authorizations/" + authAdminItemBItemB.getID()))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$", Matchers.is(
                        AuthorizationMatcher.matchAuthorization(authAdminItemBItemB))));

        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminComBItemA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminColBItemA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAdminItemBItemA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authEPersonItemA.getID()))
                .andExpect(status().isNotFound());
        getClient(tokenAdmin).perform(get("/api/authz/authorizations/" + authAnonymousItemA.getID()))
                .andExpect(status().isNotFound());
    }
}
@@ -0,0 +1,147 @@
/**
 * The contents of this file are subject to the license and copyright
 * detailed in the LICENSE and NOTICE files at the root of the source
 * tree and available online at
 *
 * http://www.dspace.org/license/
 */
package org.dspace.app.rest.csv;

import static com.jayway.jsonpath.JsonPath.read;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.fileUpload;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import com.google.gson.Gson;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.ProcessMatcher;
import org.dspace.app.rest.model.ParameterValueRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.ProcessStatus;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

public class CsvExportIT extends AbstractControllerIntegrationTest {

    @Autowired
    private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
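    /**
     * When metadata-export is launched over the REST scripts endpoint, the "-f" option is no
     * longer needed: the process is expected to finish in COMPLETED state with only "-i".
     */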
    @Test
    public void metadataExportTestWithoutFileParameterSucceeds() throws Exception {

        context.turnOffAuthorisationSystem();

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();
        Collection col3 = CollectionBuilder.createCollection(context, child1).withName("OrgUnits").build();

        Item article = ItemBuilder.createItem(context, col1)
                .withTitle("Article")
                .withIssueDate("2017-10-17")
                .withRelationshipType("Publication")
                .build();

        AtomicReference<Integer> idRef = new AtomicReference<>();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
        parameters.add(new DSpaceCommandLineParameter("-i", col1.getHandle()));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        try {
            String token = getAuthToken(admin.getEmail(), password);

            getClient(token)
                    .perform(fileUpload("/api/system/scripts/metadata-export/processes")
                            .param("properties",
                                    new Gson().toJson(list)))
                    .andExpect(status().isAccepted())
                    .andExpect(jsonPath("$", is(
                            ProcessMatcher.matchProcess("metadata-export",
                                    String.valueOf(admin.getID()), parameters,
                                    ProcessStatus.COMPLETED))))
                    .andDo(result -> idRef
                            .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
            ProcessBuilder.deleteProcess(idRef.get());
        }
    }
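    /**
     * Passing the "-f" option to a REST-launched metadata-export is rejected, so the process is
     * expected to end in FAILED state.
     */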
    @Test
    public void metadataExportTestWithFileParameterFails() throws Exception {

        context.turnOffAuthorisationSystem();

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();
        Collection col3 = CollectionBuilder.createCollection(context, child1).withName("OrgUnits").build();

        Item article = ItemBuilder.createItem(context, col1)
                .withTitle("Article")
                .withIssueDate("2017-10-17")
                .withRelationshipType("Publication")
                .build();

        AtomicReference<Integer> idRef = new AtomicReference<>();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
        parameters.add(new DSpaceCommandLineParameter("-f", "test.csv"));
        parameters.add(new DSpaceCommandLineParameter("-i", col1.getHandle()));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        try {
            String token = getAuthToken(admin.getEmail(), password);

            getClient(token)
                    .perform(fileUpload("/api/system/scripts/metadata-export/processes")
                            .param("properties",
                                    new Gson().toJson(list)))
                    .andExpect(status().isAccepted())
                    .andExpect(jsonPath("$", is(
                            ProcessMatcher.matchProcess("metadata-export",
                                    String.valueOf(admin.getID()), parameters,
                                    ProcessStatus.FAILED))))
                    .andDo(result -> idRef
                            .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
            ProcessBuilder.deleteProcess(idRef.get());
        }
    }
}
@@ -7,39 +7,55 @@
 */
package org.dspace.app.rest.csv;

import static com.jayway.jsonpath.JsonPath.read;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.fileUpload;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import com.google.gson.Gson;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.ProcessMatcher;
import org.dspace.app.rest.matcher.RelationshipMatcher;
import org.dspace.app.rest.model.ParameterValueRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.test.AbstractEntityIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.ProcessStatus;
import org.dspace.content.Relationship;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockMultipartFile;

public class CsvImportIT extends AbstractEntityIntegrationTest {

@@ -55,6 +71,9 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
    @Autowired
    private ItemService itemService;

    @Autowired
    private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;

    @Test
    public void createRelationshipsWithCsvImportTest() throws Exception {
        context.turnOffAuthorisationSystem();
@@ -119,6 +138,7 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {

        assertArticleRelationships(article, itemB, itemC, itemF);

    }

    private void assertItemERelationships(Item itemB, Item itemE, Item itemF) throws SQLException {
@@ -132,8 +152,8 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
        List<Relationship> relationshipsForArticle = relationshipService
                .findByItemAndRelationshipType(context, article, relationshipTypeService
                        .findbyTypesAndTypeName(context, entityTypeService.findByEntityType(context, "Publication"),
                                entityTypeService.findByEntityType(context, "Person"), "isAuthorOfPublication",
                                "isPublicationOfAuthor"));
                            entityTypeService.findByEntityType(context, "Person"), "isAuthorOfPublication",
                            "isPublicationOfAuthor"));
        assertThat(relationshipsForArticle.size(), is(3));
        List<Item> expectedRelationshipsItemsForArticle = new ArrayList<>();
        expectedRelationshipsItemsForArticle.add(itemC);
@@ -149,7 +169,7 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
            }
        }
        assertThat(true, Matchers.is(actualRelationshipsItemsForArticle
                .containsAll(expectedRelationshipsItemsForArticle)));
            .containsAll(expectedRelationshipsItemsForArticle)));
    }

    private void updateArticleItemToAddAnotherRelationship(Collection col1, Item article, Item itemB, Item itemC,
@@ -222,22 +242,104 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
    }
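    /**
     * Run the metadata-import script through the REST scripts endpoint, posting the CSV content
     * as a multipart file instead of writing it to a local file first.
     */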
    private void performImportScript(String[] csv) throws Exception {
        String filename = "test.csv";
        BufferedWriter out = new BufferedWriter(
                new OutputStreamWriter(
                        new FileOutputStream(filename), "UTF-8"));
        for (String csvLine : csv) {
            out.write(csvLine + "\n");
        }
        out.flush();
        out.close();
        out = null;
        InputStream inputStream = new ByteArrayInputStream(String.join(System.lineSeparator(),
                Arrays.asList(csv))
                .getBytes(StandardCharsets.UTF_8));

        runDSpaceScript("metadata-import", "-f", filename, "-e", "admin@email.com", "-s");
        MockMultipartFile bitstreamFile = new MockMultipartFile("file",
                "test.csv", MediaType.TEXT_PLAIN_VALUE,
                inputStream);

        File file = new File(filename);
        if (file.exists()) {
            file.delete();
        AtomicReference<Integer> idRef = new AtomicReference<>();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
        parameters.add(new DSpaceCommandLineParameter("-f", "test.csv"));
        parameters.add(new DSpaceCommandLineParameter("-s", ""));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        try {
            String token = getAuthToken(admin.getEmail(), password);

            getClient(token)
                    .perform(fileUpload("/api/system/scripts/metadata-import/processes").file(bitstreamFile)
                            .param("properties",
                                    new Gson().toJson(list)))
                    .andExpect(status().isAccepted())
                    .andDo(result -> idRef
                            .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
            ProcessBuilder.deleteProcess(idRef.get());
        }
    }
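    /**
     * Specifying an explicit "-e <email>" parameter for a REST-launched metadata-import is
     * rejected (the EPerson is presumably taken from the authenticated request), so the process
     * must end in FAILED state and the item described in the CSV must not be created.
     */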
    @Test
    public void csvImportWithSpecifiedEPersonParameterTestShouldFailProcess() throws Exception {
        context.turnOffAuthorisationSystem();

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
        Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build();
        Collection col3 = CollectionBuilder.createCollection(context, child1).withName("OrgUnits").build();

        Item article = ItemBuilder.createItem(context, col1)
                .withTitle("Article")
                .withIssueDate("2017-10-17")
                .withRelationshipType("Publication")
                .build();

        String csvLineString = "+," + col1.getHandle() + ",TestItemB,Person," + article
                .getID().toString();
        String[] csv = {"id,collection,dc.title,relationship.type,relation.isPublicationOfAuthor", csvLineString};

        InputStream inputStream = new ByteArrayInputStream(String.join(System.lineSeparator(),
                Arrays.asList(csv))
                .getBytes(StandardCharsets.UTF_8));

        MockMultipartFile bitstreamFile = new MockMultipartFile("file",
                "test.csv", MediaType.TEXT_PLAIN_VALUE,
                inputStream);

        AtomicReference<Integer> idRef = new AtomicReference<>();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
        parameters.add(new DSpaceCommandLineParameter("-f", "test.csv"));
        parameters.add(new DSpaceCommandLineParameter("-s", ""));
        parameters.add(new DSpaceCommandLineParameter("-e", "dspace@dspace.com"));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        try {
            String token = getAuthToken(admin.getEmail(), password);

            getClient(token)
                    .perform(fileUpload("/api/system/scripts/metadata-import/processes").file(bitstreamFile)
                            .param("properties",
                                    new Gson().toJson(list)))
                    .andExpect(status().isAccepted())
                    .andExpect(jsonPath("$", is(
                            ProcessMatcher.matchProcess("metadata-import",
                                    String.valueOf(admin.getID()), parameters,
                                    ProcessStatus.FAILED))))
                    .andDo(result -> idRef
                            .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
            ProcessBuilder.deleteProcess(idRef.get());
        }

        Iterator<Item> itemIteratorItem = itemService.findByMetadataField(context, "dc", "title", null, "TestItemB");
        assertFalse(itemIteratorItem.hasNext());
    }
}
@@ -58,12 +58,23 @@ public class CommunityMatcher {
        );
    }

    public static Matcher<? super Object> matchCommunityEntryNonAdminEmbeds(String name, UUID uuid, String handle) {
        return allOf(
                matchProperties(name, uuid, handle),
                hasJsonPath("$._embedded.collections", Matchers.not(Matchers.empty())),
                hasJsonPath("$._embedded.logo", Matchers.not(Matchers.empty())),
                matchLinks(uuid),
                matchNonAdminEmbeds()
        );
    }

    public static Matcher<? super Object> matchCommunityEntryFullProjection(String name, UUID uuid, String handle) {
        return allOf(
                matchProperties(name, uuid, handle),
                hasJsonPath("$._embedded.collections", Matchers.not(Matchers.empty())),
                hasJsonPath("$._embedded.logo", Matchers.not(Matchers.empty())),
                matchLinks(uuid)
                matchLinks(uuid),
                matchFullEmbeds()
        );
    }

@@ -82,7 +93,7 @@ public class CommunityMatcher {
    /**
     * Gets a matcher for all expected embeds when the full projection is requested.
     */
    public static Matcher<? super Object> matchFullEmbeds() {
    public static Matcher<? super Object> matchNonAdminEmbeds() {
        return matchEmbeds(
                "collections[]",
                "logo",
@@ -91,6 +102,19 @@ public class CommunityMatcher {
        );
    }

    /**
     * Gets a matcher for all expected embeds when the full projection is requested.
     */
    public static Matcher<? super Object> matchFullEmbeds() {
        return matchEmbeds(
                "collections[]",
                "logo",
                "parentCommunity",
                "subcommunities[]",
                "adminGroup"
        );
    }

    /**
     * Gets a matcher for all expected links.
     */
@@ -117,7 +141,7 @@ public class CommunityMatcher {
        );
    }

    public static String getFullEmbedsParameters() {
    public static String getNonAdminEmbeds() {
        return "collections,logo,parentCommunity,subcommunities";
    }
@@ -40,7 +40,7 @@ import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;

/**
 * IT for {@link CurationCli}
 * IT for {@link Curation}
 *
 * @author Maria Verdonck (Atmire) on 24/06/2020
 */
@@ -75,7 +75,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-t", "invalidTaskOption"));

@@ -95,98 +94,12 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
            .andExpect(status().isBadRequest());
    }

    @Test
    public void curateScript_MissingEperson() throws Exception {
        context.turnOffAuthorisationSystem();

        String token = getAuthToken(admin.getEmail(), password);

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        Item publicItem1 = ItemBuilder.createItem(context, col1)
                .withTitle("Public item 1")
                .withIssueDate("2017-10-17")
                .withAuthor("Smith, Donald").withAuthor("Doe, John")
                .withSubject("ExtraEntry")
                .build();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        context.restoreAuthSystemState();

        // Request with missing required -e <email>
        getClient(token)
                .perform(post(CURATE_SCRIPT_ENDPOINT).contentType("multipart/form-data")
                        .param("properties",
                                new Gson().toJson(list)))
                // Illegal Argument Exception
                .andExpect(status().isBadRequest());
    }

    @Test
    public void curateScript_NonExistentEPerson() throws Exception {
        context.turnOffAuthorisationSystem();

        String token = getAuthToken(admin.getEmail(), password);

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        Item publicItem1 = ItemBuilder.createItem(context, col1)
                .withTitle("Public item 1")
                .withIssueDate("2017-10-17")
                .withAuthor("Smith, Donald").withAuthor("Doe, John")
                .withSubject("ExtraEntry")
                .build();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", "nonExistentEmail@test.com"));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());

        context.restoreAuthSystemState();

        // Request with -e <nonExistingEPersonEmail>
        getClient(token)
                .perform(post(CURATE_SCRIPT_ENDPOINT).contentType("multipart/form-data")
                        .param("properties",
                                new Gson().toJson(list)))
                // Illegal Argument Exception
                .andExpect(status().isBadRequest());
    }

    @Test
    public void curateScript_MissingHandle() throws Exception {
        String token = getAuthToken(admin.getEmail(), password);

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

        List<ParameterValueRest> list = parameters.stream()
@@ -210,7 +123,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-i", "invalidhandle"));
        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

        List<ParameterValueRest> list = parameters.stream()
@@ -250,7 +162,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));

        List<ParameterValueRest> list = parameters.stream()
@@ -275,7 +186,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", "all"));
        parameters.add(new DSpaceCommandLineParameter("-s", "invalidScope"));

@@ -299,7 +209,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", "all"));
        parameters.add(new DSpaceCommandLineParameter("-T", "invalidTaskFile"));

@@ -341,7 +250,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

@@ -361,7 +269,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
                .andExpect(jsonPath("$", is(
                        ProcessMatcher.matchProcess("curate",
                                String.valueOf(admin.getID()), parameters,
                                ProcessStatus.SCHEDULED))))
                                ProcessStatus.COMPLETED))))
                .andDo(result -> idRef
                        .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
@@ -394,7 +302,6 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
        File taskFile = new File(testProps.get("test.curateTaskFile").toString());

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
        parameters.add(new DSpaceCommandLineParameter("-e", admin.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-T", taskFile.getAbsolutePath()));

@@ -414,7 +321,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
                .andExpect(jsonPath("$", is(
                        ProcessMatcher.matchProcess("curate",
                                String.valueOf(admin.getID()), parameters,
                                ProcessStatus.SCHEDULED))))
                                ProcessStatus.COMPLETED))))
                .andDo(result -> idRef
                        .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
@@ -422,4 +329,57 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
        }
    }
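    /**
     * Passing "-e" with a different EPerson's email than the authenticated admin is rejected for
     * REST-launched curation: the process is recorded for the admin and ends in FAILED state.
     */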
    @Test
    public void curateScript_EPersonInParametersFails() throws Exception {
        context.turnOffAuthorisationSystem();

        String token = getAuthToken(admin.getEmail(), password);

        parentCommunity = CommunityBuilder.createCommunity(context)
                .withName("Parent Community")
                .build();
        Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
                .withName("Sub Community")
                .build();
        Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();

        Item publicItem1 = ItemBuilder.createItem(context, col1)
                .withTitle("Public item 1")
                .withIssueDate("2017-10-17")
                .withAuthor("Smith, Donald").withAuthor("Doe, John")
                .withSubject("ExtraEntry")
                .build();

        LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();

        parameters.add(new DSpaceCommandLineParameter("-e", eperson.getEmail()));
        parameters.add(new DSpaceCommandLineParameter("-i", publicItem1.getHandle()));
        parameters.add(new DSpaceCommandLineParameter("-t", CurationClientOptions.getTaskOptions().get(0)));

        List<ParameterValueRest> list = parameters.stream()
                .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
                        .convert(dSpaceCommandLineParameter, Projection.DEFAULT))
                .collect(Collectors.toList());
        AtomicReference<Integer> idRef = new AtomicReference<>();

        context.restoreAuthSystemState();
        try {

            getClient(token)
                    .perform(post(CURATE_SCRIPT_ENDPOINT).contentType("multipart/form-data")
                            .param("properties",
                                    new Gson().toJson(list)))
                    .andExpect(jsonPath("$", is(
                            ProcessMatcher.matchProcess("curate",
                                    String.valueOf(admin.getID()), parameters,
                                    ProcessStatus.FAILED))))
                    .andDo(result -> idRef
                            .set(read(result.getResponse().getContentAsString(), "$.processId")));
        } finally {
            ProcessBuilder.deleteProcess(idRef.get());
        }
    }

}
@@ -37,7 +37,7 @@ if [ "$JAVA_OPTS" = "" ]; then
|
||||
fi
|
||||
|
||||
# Remove lock file, in case the old Handle server did not shut down properly
|
||||
rm -f $handledir/txns/lock
|
||||
rm -f $HANDLEDIR/txns/lock
|
||||
|
||||
# Start the Handle server, with a special log4j properties file.
|
||||
# We cannot simply write to the same logs, since log4j
|
||||
|
@@ -672,7 +672,7 @@ event.dispatcher.noindex.consumers = eperson

# consumer to maintain the discovery index
event.consumer.discovery.class = org.dspace.discovery.IndexEventConsumer
event.consumer.discovery.filters = Community|Collection|Item|Bundle+Add|Create|Modify|Modify_Metadata|Delete|Remove
event.consumer.discovery.filters = Community|Collection|Item|Bundle|Site+Add|Create|Modify|Modify_Metadata|Delete|Remove

# consumer related to EPerson changes
event.consumer.eperson.class = org.dspace.eperson.EPersonConsumer
@@ -1429,6 +1429,10 @@ webui.content_disposition_threshold = 8388608
# the directory where the generated sitemaps are stored
sitemap.dir = ${dspace.dir}/sitemaps

# Customize the path of sitemaps in the server webapp
# Defaults to "sitemaps", which means they are available at ${dspace.server.url}/sitemaps/
# sitemap.path = sitemaps

#
# Comma-separated list of search engine URLs to 'ping' when a new Sitemap has
# been created. Include everything except the Sitemap URL itself (which will
@@ -1442,6 +1446,14 @@ sitemap.engineurls = http://www.google.com/webmasters/sitemaps/ping?sitemap=
#
# No known Sitemap 'ping' URL for MSN/Live search

# Define cron for how frequently the sitemap should refresh.
# Defaults to running daily at 1:15am
# Cron syntax is defined at https://www.quartz-scheduler.org/api/2.3.0/org/quartz/CronTrigger.html
# Remove (comment out) this config to disable the sitemap scheduler.
# Sitemap scheduler can also be disabled by setting to "-" (single dash) in local.cfg.
# Keep in mind, changing the schedule requires rebooting your servlet container, e.g. Tomcat.
sitemap.cron = 0 15 1 * * ?
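# For example, to turn the scheduler off from local.cfg without editing this file:
#   sitemap.cron = -
# or (illustrative schedule only) to regenerate every 6 hours instead of daily:
#   sitemap.cron = 0 0 */6 * * ?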

##### SHERPA/Romeo Integration Settings ####
# the SHERPA/RoMEO endpoint
sherpa.romeo.url = http://www.sherpa.ac.uk/romeo/api29.php
@@ -16,5 +16,18 @@
        <scope_note></scope_note>
    </dc-type>

    <dc-type>
        <schema>dspace</schema>
        <element>agreements</element>
        <qualifier>end-user</qualifier>
        <scope_note>Stores whether the End User Agreement has been accepted by an EPerson. Valid values: true, false</scope_note>
    </dc-type>

    <dc-type>
        <schema>dspace</schema>
        <element>agreements</element>
        <qualifier>cookies</qualifier>
        <scope_note>Stores the cookie preferences of an EPerson, as selected in the last session. The value will be an array of cookieName/boolean pairs, specifying which cookies are allowed or not allowed.</scope_note>
    </dc-type>

</dspace-dc-types>
119
dspace/config/spring/api/arxiv-integration.xml
Normal file
@@ -0,0 +1,119 @@
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:context="http://www.springframework.org/schema/context"
       xmlns:util="http://www.springframework.org/schema/util"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
           http://www.springframework.org/schema/context
           http://www.springframework.org/schema/context/spring-context-2.5.xsd http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd"
       default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">

    <context:annotation-config/> <!-- allows us to use spring annotations in beans -->

    <util:map id="arxivMetadataFieldMap" key-type="org.dspace.importer.external.metadatamapping.MetadataFieldConfig"
              value-type="org.dspace.importer.external.metadatamapping.contributor.MetadataContributor">
        <description>Defines which incoming metadatum is mapped onto which DSpace metadatum. Note that while the key
            must be unique, it only matters here for postprocessing of the value. The mapped MetadatumContributor has
            full control over what metadata field is generated.
        </description>
        <entry key-ref="arxiv.title" value-ref="arxivTitleContrib"/>
        <entry key-ref="arxiv.summary" value-ref="arxivSummaryContrib"/>
        <entry key-ref="arxiv.published" value-ref="arxivPublishedContrib"/>
        <entry key-ref="arxiv.arxiv.doi" value-ref="arxivDoiContrib"/>
        <entry key-ref="arxiv.arxiv.journal_ref" value-ref="arxivJournalContrib"/>
        <entry key-ref="arxiv.category.term" value-ref="arxivCategoryTermContrib"/>
        <entry key-ref="arxiv.author.name" value-ref="arxivAuthorContrib"/>
        <entry key-ref="arxiv.identifier.other" value-ref="arxivOtherContrib"/>
    </util:map>
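    <!-- Illustrative only: to map an additional arXiv element, define a contributor bean plus a
         MetadataFieldConfig bean and register the pair in the map above. For example, a
         hypothetical mapping of the Atom "ns:comment" element onto dc.description would mirror
         the arxivTitleContrib / arxiv.title pair below, with the query and constructor-arg
         changed accordingly. -->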
<bean id="arxivOtherContrib" class="org.dspace.importer.external.arxiv.metadatamapping.contributor.ArXivIdMetadataContributor">
|
||||
<property name="field" ref="arxiv.identifier.other"/>
|
||||
<property name="query" value="ns:id"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.identifier.other" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.identifier.other"/>
|
||||
</bean>
|
||||
|
||||
|
||||
<bean id="arxivTitleContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.title"/>
|
||||
<property name="query" value="ns:title"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.title" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.title"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivSummaryContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.summary"/>
|
||||
<property name="query" value="ns:summary"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.summary" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.description.abstract"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivPublishedContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.published"/>
|
||||
<property name="query" value="ns:published"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.published" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.date.issued"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivDoiContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.arxiv.doi"/>
|
||||
<property name="query" value="arxiv:doi"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivArxivPrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.arxiv.doi" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.identifier"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivJournalContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.arxiv.journal_ref"/>
|
||||
<property name="query" value="arxiv:journal_ref"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivArxivPrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.arxiv.journal_ref" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.source"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivCategoryTermContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.category.term"/>
|
||||
<property name="query" value="ns:category/@term"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.category.term" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.subject"/>
|
||||
</bean>
|
||||
|
||||
<bean id="arxivAuthorContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleXpathMetadatumContributor">
|
||||
<property name="field" ref="arxiv.author.name"/>
|
||||
<property name="query" value="ns:author/name"/>
|
||||
<property name="prefixToNamespaceMapping" ref="arxivBasePrefixToNamespaceMapping"/>
|
||||
</bean>
|
||||
<bean id="arxiv.author.name" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
|
||||
<constructor-arg value="dc.contributor.author"/>
|
||||
</bean>
|
||||
|
||||
<util:map id="arxivBasePrefixToNamespaceMapping" map-class="java.util.HashMap"
|
||||
key-type="java.lang.String" value-type="java.lang.String">
|
||||
<entry key="http://www.w3.org/2005/Atom" value="ns" />
|
||||
</util:map>
|
||||
|
||||
|
||||
<util:map id="arxivArxivPrefixToNamespaceMapping" map-class="java.util.HashMap"
|
||||
key-type="java.lang.String" value-type="java.lang.String">
|
||||
<entry key="http://arxiv.org/schemas/atom" value="arxiv" />
|
||||
</util:map>
|
||||
|
||||
<bean class="java.lang.Integer" id="maxRetry">
|
||||
<constructor-arg value="3"/>
|
||||
</bean>
|
||||
|
||||
</beans>
|
@@ -14,9 +14,7 @@
|
||||
<!-- Specify here any data loaders that you want to have access to in the command line batch import.
|
||||
Key is the name that you need to specify in the "-i" option in the command line script when "-b"
|
||||
option is used (which means BTE Batch import) -->
|
||||
<entry key="pubmedXML" value-ref="pubmedFileDataLoader" />
|
||||
<entry key="crossrefXML" value-ref="crossRefFileDataLoader" />
|
||||
<entry key="arxivXML" value-ref="arXivFileDataLoader" />
|
||||
<entry key="ciniiXML" value-ref="ciniiFileDataLoader" />
|
||||
<entry key="bibtex" value-ref="bibTeXDataLoader" />
|
||||
<entry key="ris" value-ref="risDataLoader" />
|
||||
@@ -79,7 +77,6 @@
|
||||
<value>jeissn</value>
|
||||
<value>pisbn</value>
|
||||
<value>eisbn</value>
|
||||
<value>arxivCategory</value>
|
||||
<value>keywords</value>
|
||||
<value>mesh</value>
|
||||
<value>language</value>
|
||||
@@ -106,13 +103,9 @@
    <!-- Specify here any data loaders you want to include in the submission lookup process.
         Data loaders must either extend the "NetworkSubmissionLookupDataLoader" abstract class
         or implement BTE's "FileDataLoader" interface -->
    <entry key="pubmed" value-ref="pubmedOnlineDataLoader"/>
    <entry key="crossref" value-ref="crossRefOnlineDataLoader"/>
    <entry key="arxiv" value-ref="arXivOnlineDataLoader"/>
    <entry key="cinii" value-ref="ciniiOnlineDataLoader"/>
    <entry key="pubmedXML" value-ref="pubmedFileDataLoader"/>
    <entry key="crossRefXML" value-ref="crossRefFileDataLoader"/>
    <entry key="arXivXML" value-ref="arXivFileDataLoader"/>
    <entry key="ciniiXML" value-ref="ciniiFileDataLoader"/>
    <entry key="bibtex" value-ref="bibTeXDataLoader"/>
    <entry key="ris" value-ref="risDataLoader"/>
@@ -129,40 +122,11 @@
    <bean name="phase1LinearWorkflow" class="gr.ekt.bte.core.LinearWorkflow">
        <property name="process">
            <list>
                <ref bean="mapConverter_arxivSubject"/>
                <ref bean="mapConverter_pubstatusPubmed"/>
                <ref bean="removeLastDot"/>
            </list>
        </property>
    </bean>

    <!-- Converts an input value to an output value -->
    <bean name="mapConverter_arxivSubject" class="org.dspace.submit.lookup.MapConverterModifier" init-method="init">
        <constructor-arg value="mapConverter_arxivSubject Modifier"/>
        <property name="converterNameFile" value="mapConverter-arxivSubject.properties"/>
        <property name="configurationService" ref="org.dspace.services.ConfigurationService"/>
        <property name="fieldKeys">
            <list>
                <!-- The internal BTE keys to which this modifier applies -->
                <value>arxivCategory</value>
            </list>
        </property>
    </bean>

    <!-- Converts an input value to an output value -->
    <bean name="mapConverter_pubstatusPubmed" class="org.dspace.submit.lookup.MapConverterModifier" init-method="init">
        <constructor-arg value="mapConverter_pubstatusPubmed Modifier"/>
        <property name="converterNameFile" value="mapConverter-pubstatusPubmed.properties"/>
        <property name="configurationService" ref="org.dspace.services.ConfigurationService"/>
        <property name="defaultValue" value="Subjected to Journal"/>
        <property name="fieldKeys">
            <list>
                <!-- The internal BTE keys to which this modifier applies -->
                <value>publicationStatus</value>
            </list>
        </property>
    </bean>
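In effect these MapConverterModifier beans perform a keyed value substitution with an optional fallback: values of the listed field keys are looked up in the named .properties file and replaced, with defaultValue used when no mapping matches. The sketch below is a stand-in for illustration, not DSpace's implementation; the sample mapping row and record contents are invented.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class MapConverterSketch {
    private final Properties mapping;      // raw value -> converted value
    private final String defaultValue;     // null means "keep the raw value"
    private final List<String> fieldKeys;  // record fields this converter touches

    public MapConverterSketch(Properties mapping, String defaultValue, List<String> fieldKeys) {
        this.mapping = mapping;
        this.defaultValue = defaultValue;
        this.fieldKeys = fieldKeys;
    }

    /** Rewrites the listed fields of a simple key -> values record in place. */
    public void apply(Map<String, List<String>> record) {
        for (String key : fieldKeys) {
            List<String> values = record.get(key);
            if (values == null) {
                continue;
            }
            values.replaceAll(raw -> {
                String converted = mapping.getProperty(raw);
                if (converted != null) {
                    return converted;
                }
                return defaultValue != null ? defaultValue : raw;
            });
        }
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("hep-ex", "High Energy Physics - Experiment"); // assumed sample row
        MapConverterSketch converter = new MapConverterSketch(props, null, List.of("arxivCategory"));

        Map<String, List<String>> record = new HashMap<>();
        record.put("arxivCategory", new ArrayList<>(List.of("hep-ex", "unknown-code")));
        converter.apply(record);
        // Mapped values are replaced; unmapped ones survive because defaultValue is null here.
        System.out.println(record); // {arxivCategory=[High Energy Physics - Experiment, unknown-code]}
    }
}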

    <!-- Remove the last dot in the specified field keys -->
    <bean name="removeLastDot" class="org.dspace.submit.lookup.RemoveLastDotModifier">
        <constructor-arg value="removeLastDot Modifier"/>
@@ -357,75 +321,6 @@
              value="http://ebooks.serrelib.gr/serrelib-oai/request" />
    </bean>

    <!-- PubMed -->
    <bean id="pubmedOnlineDataLoader" class="org.dspace.submit.lookup.PubmedOnlineDataLoader">
        <property name="searchProvider" value="false" />
        <property name="fieldMap" ref="pubmedInputMap" />
    </bean>

    <bean id="pubmedFileDataLoader" class="org.dspace.submit.lookup.PubmedFileDataLoader">
        <property name="fieldMap" ref="pubmedInputMap" />
    </bean>

    <bean name="pubmedInputMap" class="java.util.HashMap" scope="prototype">
        <constructor-arg>
            <map key-type="java.lang.String" value-type="java.lang.String">
                <entry key="pubmedID" value="pubmedID" />
                <entry key="doi" value="doi" />
                <entry key="printISSN" value="jissn" />
                <entry key="electronicISSN" value="jeissn" />
                <entry key="journalTitle" value="journal" />
                <entry key="articleTitle" value="title" />
                <entry key="pubDate" value="issued" />
                <entry key="journalVolume" value="volume" />
                <entry key="journalIssue" value="issue" />
                <entry key="language" value="language" />
                <entry key="publicationType" value="subtype" />
                <entry key="primaryKeyword" value="keywords" />
                <entry key="secondaryKeyword" value="keywords" />
                <entry key="primaryMeshHeading" value="mesh" />
                <entry key="secondaryMeshHeading" value="mesh" />
                <entry key="startPage" value="firstpage" />
                <entry key="endPage" value="lastpage" />
                <entry key="abstractText" value="abstract" />
                <entry key="publicationStatus" value="publicationStatus" />
                <entry key="author" value="authors" />
                <!-- Not used -->
                <!--
                <entry key="pubblicationModel" value="" />
                -->
            </map>
        </constructor-arg>
    </bean>

    <!-- Arxiv Data Loaders -->
    <bean id="arXivOnlineDataLoader" class="org.dspace.submit.lookup.ArXivOnlineDataLoader">
        <property name="searchProvider" value="false" />
        <property name="fieldMap" ref="arxivInputMap" />
    </bean>

    <bean id="arXivFileDataLoader" class="org.dspace.submit.lookup.ArXivFileDataLoader">
        <property name="fieldMap" ref="arxivInputMap" />
    </bean>

    <bean name="arxivInputMap" class="java.util.HashMap" scope="prototype">
        <constructor-arg>
            <map key-type="java.lang.String" value-type="java.lang.String">
                <entry key="journalRef" value="journal" />
                <entry key="doi" value="doi" />
                <entry key="author" value="authors" />
                <entry key="authorWithAffiliation" value="authorsWithAffiliation" />
                <entry key="comment" value="note" />
                <entry key="published" value="issued" />
                <entry key="articleTitle" value="title" />
                <entry key="summary" value="abstract" />
                <entry key="id" value="url" />
                <entry key="pdfUrl" value="fulltextUrl" />
                <entry key="primaryCategory" value="arxivCategory" />
                <entry key="category" value="arxivCategory" />
            </map>
        </constructor-arg>
    </bean>

    <!-- CrossRef Data Loaders -->
    <bean id="crossRefOnlineDataLoader" class="org.dspace.submit.lookup.CrossRefOnlineDataLoader">
@@ -544,7 +439,6 @@
                <entry value="chairs" key="dc.contributor.other" />
                <entry value="abstract" key="dc.description.abstract" />
                <entry value="allkeywords" key="dc.subject" />
                <entry value="arxivCategory" key="dc.subject" />
                <entry value="doi" key="dc.identifier" />
                <entry value="publisher" key="dc.publisher" />
                <!-- Not used: new metadata fields would need to be declared in the DSpace registry for these -->
@@ -553,7 +447,6 @@
                <entry value="note" key="" />
                <entry value="fulltextUrl" key="" />
                <entry value="authorsWithAffiliation" key="" />
                <entry value="pubmedID" key="" />
                <entry value="publicationStatus" key="" />
                <entry value="jeissn" key="" />
                <entry value="volume" key="" />
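On the output side the direction reverses: the internal BTE key sits in value and the DSpace metadata field in key, and an empty key means the value is simply dropped, which is what the "Not used" comment above is flagging. A tiny sketch of that rule, with invented record contents:

import java.util.List;
import java.util.Map;

public class OutputMapSketch {
    public static void main(String[] args) {
        // BTE key -> DSpace metadata field; "" means "drop this value".
        Map<String, String> outputMap = Map.of(
            "abstract", "dc.description.abstract",
            "arxivCategory", "dc.subject",
            "pubmedID", "");

        Map<String, List<String>> record = Map.of(
            "abstract", List.of("We measure..."),
            "pubmedID", List.of("12345678"));

        record.forEach((bteKey, values) -> {
            String metadataField = outputMap.getOrDefault(bteKey, "");
            if (!metadataField.isEmpty()) {
                System.out.println(metadataField + " = " + values);
            }
            // pubmedID maps to "" and is silently discarded.
        });
    }
}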
@@ -126,6 +126,7 @@
    <bean class="org.dspace.discovery.indexobject.PoolTaskIndexFactoryImpl" autowire-candidate="true"/>
    <bean class="org.dspace.discovery.indexobject.WorkflowItemIndexFactoryImpl" autowire-candidate="true"/>
    <bean class="org.dspace.discovery.indexobject.WorkspaceItemIndexFactoryImpl" autowire-candidate="true"/>
    <bean class="org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl" autowire-candidate="true"/>

</beans>
@@ -30,5 +30,18 @@
        <property name="url" value="${lcname.url}"/>
        <property name="sourceIdentifier" value="lcname"/>
    </bean>

    <bean id="pubmedLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
        <property name="metadataSource" ref="PubmedImportService"/>
        <property name="sourceIdentifier" value="pubmed"/>
        <property name="recordIdMetadata" value="dc.identifier.other"/>
    </bean>

    <bean id="arxivLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
        <property name="metadataSource" ref="ArXivImportService"/>
        <property name="sourceIdentifier" value="arxiv"/>
        <property name="recordIdMetadata" value="dc.identifier.other"/>
    </bean>

</beans>
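Each LiveImportDataProvider registers a metadata source under a sourceIdentifier, which is how external-source lookups are routed to the right backend, with the fetched record's id recorded in the field named by recordIdMetadata. The registry below is a generic stand-in for illustration, not DSpace's actual provider API; the Provider interface and the canned data are assumptions.

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class ExternalSourceRegistrySketch {
    /** Generic stand-in for an external data provider (not DSpace's real interface). */
    interface Provider {
        String sourceIdentifier();
        Optional<Map<String, String>> getRecord(String id); // metadata field -> value
    }

    private final Map<String, Provider> bySource = new HashMap<>();

    void register(Provider p) {
        bySource.put(p.sourceIdentifier(), p);
    }

    /** Routes a lookup to the provider registered under the given source identifier. */
    Optional<Map<String, String>> lookup(String source, String id) {
        Provider p = bySource.get(source);
        return p == null ? Optional.empty() : p.getRecord(id);
    }

    public static void main(String[] args) {
        ExternalSourceRegistrySketch registry = new ExternalSourceRegistrySketch();
        registry.register(new Provider() {
            @Override public String sourceIdentifier() { return "arxiv"; }
            @Override public Optional<Map<String, String>> getRecord(String id) {
                // A real provider would call the arXiv API; this returns canned data,
                // stamping the record id into dc.identifier.other as configured above.
                return Optional.of(Map.of("dc.identifier.other", id,
                                          "dc.title", "Sample title"));
            }
        });
        System.out.println(registry.lookup("arxiv", "2101.00001"));
        System.out.println(registry.lookup("unknown", "x")); // Optional.empty
    }
}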
@@ -14,12 +14,12 @@
        <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataImportCLI"/>
    </bean>

    <bean id="metadata-export" class="org.dspace.app.bulkedit.MetadataExportScriptConfiguration">
    <bean id="metadata-export" class="org.dspace.app.bulkedit.MetadataExportCliScriptConfiguration">
        <property name="description" value="Export metadata for batch editing"/>
        <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExport"/>
        <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/>
    </bean>

    <bean id="curate" class="org.dspace.curate.CurationScriptConfiguration">
    <bean id="curate" class="org.dspace.curate.CurationCliScriptConfiguration">
        <property name="description" value="Curation tasks"/>
        <property name="dspaceRunnableClass" value="org.dspace.curate.CurationCli"/>
    </bean>
@@ -13,9 +13,7 @@
        </property>
        <property name="dataloadersMap">
            <map>
                <entry key="pubmed" value-ref="pubmedOnlineDataLoader"/>
                <entry key="crossref" value-ref="crossRefOnlineDataLoader"/>
                <entry key="arxiv" value-ref="arXivOnlineDataLoader"/>
                <entry key="cinii" value-ref="ciniiOnlineDataLoader"/>
            </map>
        </property>
Some files were not shown because too many files have changed in this diff.