Merge branch 'main' into contract-pr-222-primary-bitstream-on-bundle-support

Author: Nona Luypaert
Date: 2023-06-08 23:09:52 +02:00
116 changed files with 5416 additions and 681 deletions

View File

@@ -7,33 +7,16 @@
  */
 package org.dspace.administer;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
  */
 public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

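Every script configuration touched by this merge drops the same boilerplate isAllowedToExecute override: the authorization check now lives once in the ScriptConfiguration base class. A minimal sketch of that centralized default, inferred from what the subclasses here rely on (an inherited authorizeService field and a parameter-aware signature, both visible in the CurationScriptConfiguration hunk further down) rather than shown verbatim in this diff:

import java.sql.SQLException;
import java.util.List;

import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.springframework.beans.factory.annotation.Autowired;

public abstract class ScriptConfigurationSketch<T> {

    // Inherited by every concrete configuration, replacing the per-class
    // @Autowired AuthorizeService fields deleted in the hunks above and below.
    @Autowired
    protected AuthorizeService authorizeService;

    // Default rule: only repository administrators may run a script.
    // Subclasses (e.g. CurationScriptConfiguration) override this to widen it.
    public boolean isAllowedToExecute(Context context,
                                      List<DSpaceCommandLineParameter> commandLineParameters) {
        try {
            return authorizeService.isAdmin(context);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
}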
View File

@@ -7,33 +7,16 @@
  */
 package org.dspace.app.bulkedit;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
  */
 public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -7,22 +7,14 @@
  */
 package org.dspace.app.bulkedit;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link MetadataExport} script
  */
 public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -9,7 +9,6 @@
 package org.dspace.app.bulkedit;

 import org.apache.commons.cli.Options;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;

 /**
@@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSea
         this.dspaceRunnableclass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        return true;
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -8,22 +8,15 @@
 package org.dspace.app.bulkedit;

 import java.io.InputStream;
-import java.sql.SQLException;

 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link MetadataImport} script
  */
 public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -7,18 +7,11 @@
  */
 package org.dspace.app.harvest;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;
@@ -32,13 +25,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    public boolean isAllowedToExecute(final Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     public Options getOptions() {
         Options options = new Options();

View File

@@ -7,14 +7,9 @@
  */
 package org.dspace.app.itemexport;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link ItemExport} script
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptC
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(final Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         Options options = new Options();

View File

@@ -23,6 +23,7 @@ import java.util.UUID;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.tika.Tika;
 import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
 import org.dspace.app.itemimport.service.ItemImportService;
 import org.dspace.authorize.AuthorizeException;
@@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
     protected boolean zip = false;
     protected boolean remoteUrl = false;
     protected String zipfilename = null;
+    protected boolean zipvalid = false;
     protected boolean help = false;
     protected File workDir = null;
     protected File workFile = null;
@@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
                 handler.logInfo("***End of Test Run***");
             }
         } finally {
+            // clean work dir
             if (zip) {
-                FileUtils.deleteDirectory(new File(sourcedir));
-                FileUtils.deleteDirectory(workDir);
-                if (remoteUrl && workFile != null && workFile.exists()) {
+                // if zip file was valid then clean sourcedir
+                if (zipvalid && sourcedir != null && new File(sourcedir).exists()) {
+                    FileUtils.deleteDirectory(new File(sourcedir));
+                }
+                // clean workdir
+                if (workDir != null && workDir.exists()) {
+                    FileUtils.deleteDirectory(workDir);
+                }
+                // conditionally clean workFile if import was done in the UI or via a URL and it still exists
+                if (workFile != null && workFile.exists()) {
                     workFile.delete();
                 }
             }
@@ -322,14 +332,23 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
      */
     protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
         Optional<InputStream> optionalFileStream = Optional.empty();
+        Optional<InputStream> validationFileStream = Optional.empty();
         if (!remoteUrl) {
             // manage zip via upload
             optionalFileStream = handler.getFileStream(context, zipfilename);
+            validationFileStream = handler.getFileStream(context, zipfilename);
         } else {
             // manage zip via remote url
             optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
+            validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
         }
-        if (optionalFileStream.isPresent()) {
+
+        if (validationFileStream.isPresent()) {
+            // validate zip file
+            if (validationFileStream.isPresent()) {
+                validateZip(validationFileStream.get());
+            }
+
             workFile = new File(itemImportService.getTempWorkDir() + File.separator
                                     + zipfilename + "-" + context.getCurrentUser().getID());
             FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
@@ -337,10 +356,32 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
             throw new IllegalArgumentException(
                 "Error reading file, the file couldn't be found for filename: " + zipfilename);
         }
-        workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
+
+        workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+                               + File.separator + context.getCurrentUser().getID());
         sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
     }

+    /**
+     * Confirm that the zip file has the correct MIME type
+     * @param inputStream
+     */
+    protected void validateZip(InputStream inputStream) {
+        Tika tika = new Tika();
+        try {
+            String mimeType = tika.detect(inputStream);
+            if (mimeType.equals("application/zip")) {
+                zipvalid = true;
+            } else {
+                handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType);
+                throw new UnsupportedOperationException("A valid zip file must be supplied");
+            }
+        } catch (IOException e) {
+            throw new IllegalArgumentException(
                "There was an error while reading the zip file: " + zipfilename);
+        }
+    }
+
     /**
      * Read the mapfile
      * @param context

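The new validateZip() relies on Apache Tika's byte-level type detection rather than the file extension, so a renamed non-zip file is rejected even when it ends in ".zip". A self-contained sketch of the same check (the file name is illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.tika.Tika;

public class ZipMimeCheck {
    public static void main(String[] args) throws IOException {
        // Tika inspects the leading bytes (the "PK" zip signature), so a
        // plain-text file renamed to .zip will not detect as application/zip.
        Path path = Path.of("SAF-archive.zip"); // illustrative file name
        try (InputStream in = Files.newInputStream(path)) {
            String mimeType = new Tika().detect(in);
            System.out.println(mimeType.equals("application/zip")
                ? "valid zip" : "rejected, detected " + mimeType);
        }
    }
}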
View File

@@ -8,6 +8,7 @@
 package org.dspace.app.itemimport;

 import java.io.File;
+import java.io.FileInputStream;
 import java.io.InputStream;
 import java.net.URL;
 import java.sql.SQLException;
@@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport {
             // If this is a zip archive, unzip it first
             if (zip) {
                 if (!remoteUrl) {
+                    // confirm zip file exists
+                    File myZipFile = new File(sourcedir + File.separator + zipfilename);
+                    if ((!myZipFile.exists()) || (!myZipFile.isFile())) {
+                        throw new IllegalArgumentException(
+                            "Error reading file, the file couldn't be found for filename: " + zipfilename);
+                    }
+
+                    // validate zip file
+                    InputStream validationFileStream = new FileInputStream(myZipFile);
+                    validateZip(validationFileStream);
+
                     workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
                                            + File.separator + context.getCurrentUser().getID());
                     sourcedir = itemImportService.unzip(
@@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport {
                 // manage zip via remote url
                 Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
                 if (optionalFileStream.isPresent()) {
+                    // validate zip file via url
+                    Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
+                    if (validationFileStream.isPresent()) {
+                        validateZip(validationFileStream.get());
+                    }
+
                     workFile = new File(itemImportService.getTempWorkDir() + File.separator
                                             + zipfilename + "-" + context.getCurrentUser().getID());
                     FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
+                    workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+                                           + File.separator + context.getCurrentUser().getID());
+                    sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
                 } else {
                     throw new IllegalArgumentException(
                         "Error reading file, the file couldn't be found for filename: " + zipfilename);
                 }
-                workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
-                sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
             }
         }
     }

View File

@@ -8,14 +8,10 @@
 package org.dspace.app.itemimport;

 import java.io.InputStream;
-import java.sql.SQLException;

 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link ItemImport} script
@@ -24,9 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(final Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         Options options = new Options();

View File

@@ -7,25 +7,16 @@
  */
 package org.dspace.app.mediafilter;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";

     @Override
     public Class<T> getDspaceRunnableClass() {
         return dspaceRunnableClass;
@@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(final Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         Options options = new Options();

View File

@@ -8,7 +8,6 @@
 package org.dspace.app.solrdatabaseresync;

 import org.apache.commons.cli.Options;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;

 /**
@@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio
         this.dspaceRunnableClass = dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        return true;
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
 import org.dspace.content.Collection;
 import org.dspace.content.DSpaceObject;
+import org.dspace.content.factory.ContentServiceFactory;
+import org.dspace.content.service.CollectionService;
 import org.dspace.core.Context;
+import org.dspace.discovery.SearchServiceException;
 import org.dspace.handle.factory.HandleServiceFactory;
 import org.dspace.services.factory.DSpaceServicesFactory;
 import org.w3c.dom.Document;
@@ -105,6 +108,13 @@ public class SubmissionConfigReader {
      */
     private SubmissionConfig lastSubmissionConfig = null;

+    /**
+     * Collection Service instance, needed to interact with collection's
+     * stored data
+     */
+    protected static final CollectionService collectionService
+        = ContentServiceFactory.getInstance().getCollectionService();
+
     /**
      * Load Submission Configuration from the
      * item-submission.xml configuration file
@@ -152,6 +162,9 @@ public class SubmissionConfigReader {
         } catch (FactoryConfigurationError fe) {
             throw new SubmissionConfigReaderException(
                 "Cannot create Item Submission Configuration parser", fe);
+        } catch (SearchServiceException se) {
+            throw new SubmissionConfigReaderException(
+                "Cannot perform a discovery search for Item Submission Configuration", se);
         } catch (Exception e) {
             throw new SubmissionConfigReaderException(
                 "Error creating Item Submission Configuration: " + e);
@@ -287,7 +300,7 @@ public class SubmissionConfigReader {
      * should correspond to the collection-form maps, the form definitions, and
      * the display/storage word pairs.
      */
-    private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException {
+    private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException {
         if (n == null) {
             return;
         }
@@ -334,18 +347,23 @@ public class SubmissionConfigReader {
      * the collection handle and item submission name, put name in hashmap keyed
      * by the collection handle.
      */
-    private void processMap(Node e) throws SAXException {
+    private void processMap(Node e) throws SAXException, SearchServiceException {
+        // create a context
+        Context context = new Context();
+
         NodeList nl = e.getChildNodes();
         int len = nl.getLength();
         for (int i = 0; i < len; i++) {
             Node nd = nl.item(i);
             if (nd.getNodeName().equals("name-map")) {
                 String id = getAttribute(nd, "collection-handle");
+                String entityType = getAttribute(nd, "collection-entity-type");
                 String value = getAttribute(nd, "submission-name");
                 String content = getValue(nd);
-                if (id == null) {
+                if (id == null && entityType == null) {
                     throw new SAXException(
-                        "name-map element is missing collection-handle attribute in 'item-submission.xml'");
+                        "name-map element is missing collection-handle or collection-entity-type attribute " +
+                            "in 'item-submission.xml'");
                 }
                 if (value == null) {
                     throw new SAXException(
@@ -355,7 +373,17 @@ public class SubmissionConfigReader {
                     throw new SAXException(
                         "name-map element has content in 'item-submission.xml', it should be empty.");
                 }
-                collectionToSubmissionConfig.put(id, value);
+                if (id != null) {
+                    collectionToSubmissionConfig.put(id, value);
+                } else {
+                    // get all collections for this entity-type
+                    List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,
+                        entityType);
+                    for (Collection collection : collections) {
+                        collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value);
+                    }
+                }
             } // ignore any child node that isn't a "name-map"
         }
     }
@@ -635,4 +663,4 @@ public class SubmissionConfigReader {
         }
         return results;
     }
 }

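processMap() now accepts a collection-entity-type attribute as an alternative to collection-handle, and registers entity-type matches with putIfAbsent(), so an explicit handle mapping always wins over an entity-type mapping. The precedence rule in isolation (handles and submission names are illustrative):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NameMapPrecedence {
    public static void main(String[] args) {
        Map<String, String> collectionToSubmissionConfig = new HashMap<>();

        // <name-map collection-handle="123456789/4" submission-name="traditional"/>
        // handle-specific entries are stored first, with put()
        collectionToSubmissionConfig.put("123456789/4", "traditional");

        // <name-map collection-entity-type="Publication" submission-name="publication"/>
        // entity-type matches (as returned by findAllCollectionsByEntityType)
        // are added afterwards with putIfAbsent(), so they never override a handle entry
        for (String handle : List.of("123456789/4", "123456789/7")) {
            collectionToSubmissionConfig.putIfAbsent(handle, "publication");
        }

        // 123456789/4 keeps "traditional"; 123456789/7 gets "publication"
        System.out.println(collectionToSubmissionConfig);
    }
}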
View File

@@ -43,6 +43,7 @@ import org.dspace.discovery.SearchService;
 import org.dspace.discovery.SearchServiceException;
 import org.dspace.discovery.indexobject.IndexableCollection;
 import org.dspace.discovery.indexobject.IndexableCommunity;
+import org.dspace.discovery.indexobject.IndexableItem;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.service.GroupService;
@@ -755,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
     }

+    /**
+     * Checks that the context's current user is an item admin in the site by querying the solr database.
+     *
+     * @param context context with the current user
+     * @return true if the current user is an item admin in the site
+     *         false when this is not the case, or an exception occurred
+     * @throws java.sql.SQLException passed through.
+     */
+    @Override
+    public boolean isItemAdmin(Context context) throws SQLException {
+        return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
+    }
+
     /**
      * Checks that the context's current user is a community or collection admin in the site.
      *

View File

@@ -514,6 +514,15 @@ public interface AuthorizeService {
      */
     boolean isCollectionAdmin(Context context) throws SQLException;

+    /**
+     * Checks that the context's current user is an item admin in the site by querying the solr database.
+     *
+     * @param context context with the current user
+     * @return true if the current user is an item admin in the site
+     *         false when this is not the case, or an exception occurred
+     */
+    boolean isItemAdmin(Context context) throws SQLException;
+
     /**
      * Checks that the context's current user is a community or collection admin in the site.
      *

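With isItemAdmin() alongside the existing isAdmin() and isComColAdmin() checks, callers can gate features on any site-level administrative role; all three are answered by the same Solr resource-type query in performCheck(). A hedged usage sketch (the wrapper method name is illustrative):

import java.sql.SQLException;

import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;

public class AdminGate {
    // True when the current user administers anything at all: the site, a
    // community/collection, or at least one item. isItemAdmin() is the method
    // added in this merge; the other two already existed on the interface.
    public static boolean hasAnyAdminRole(AuthorizeService authorizeService, Context context)
        throws SQLException {
        return authorizeService.isAdmin(context)
            || authorizeService.isComColAdmin(context)
            || authorizeService.isItemAdmin(context);
    }
}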
View File

@@ -22,11 +22,13 @@ import org.dspace.sort.SortOption;
 * This class holds all the information about a specifically configured
 * BrowseIndex. It is responsible for parsing the configuration, understanding
 * about what sort options are available, and what the names of the database
- * tables that hold all the information are actually called.
+ * tables that hold all the information are actually called. Hierarchical browse
+ * indexes also contain information about the vocabulary they're using, see:
+ * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex}
 *
 * @author Richard Jones
 */
-public final class BrowseIndex {
+public class BrowseIndex {
     /** the configuration number, as specified in the config */
     /**
      * used for single metadata browse tables for generating the table name
@@ -102,7 +104,7 @@ public final class BrowseIndex {
      *
      * @param baseName The base of the table name
      */
-    private BrowseIndex(String baseName) {
+    protected BrowseIndex(String baseName) {
         try {
             number = -1;
             tableBaseName = baseName;

View File

@@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO {
     }

     private void addDefaultFilterQueries(DiscoverQuery query) {
-        DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container);
+        DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container);
         discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
     }

View File

@@ -1047,4 +1047,24 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
         return (int) resp.getTotalSearchResults();
     }

+    @Override
+    @SuppressWarnings("rawtypes")
+    public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
+        throws SearchServiceException {
+        List<Collection> collectionList = new ArrayList<>();
+
+        DiscoverQuery discoverQuery = new DiscoverQuery();
+        discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
+        discoverQuery.addFilterQueries("dspace.entity.type:" + entityType);
+
+        DiscoverResult discoverResult = searchService.search(context, discoverQuery);
+        List<IndexableObject> solrIndexableObjects = discoverResult.getIndexableObjects();
+
+        for (IndexableObject solrCollection : solrIndexableObjects) {
+            Collection c = ((IndexableCollection) solrCollection).getIndexedObject();
+            collectionList.add(c);
+        }
+        return collectionList;
+    }
 }

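The lookup filters the Discovery (Solr) index on dspace.entity.type rather than querying the database, which is why the interface javadoc further down warns that results can be stale until reindexing. A hedged usage sketch ("Publication" is an illustrative entity type):

import java.util.List;

import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;

public class EntityTypeLookup {
    // Returns every collection whose dspace.entity.type is "Publication".
    // Because the answer comes from Solr, a just-created collection may be
    // missing until the discovery index has caught up.
    public static List<Collection> publicationCollections(Context context)
        throws SearchServiceException {
        CollectionService collectionService =
            ContentServiceFactory.getInstance().getCollectionService();
        return collectionService.findAllCollectionsByEntityType(context, "Publication");
    }
}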
View File

@@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService;
 import org.dspace.event.Event;
 import org.dspace.harvest.HarvestedItem;
 import org.dspace.harvest.service.HarvestedItemService;
+import org.dspace.identifier.DOI;
 import org.dspace.identifier.IdentifierException;
+import org.dspace.identifier.service.DOIService;
 import org.dspace.identifier.service.IdentifierService;
 import org.dspace.orcid.OrcidHistory;
 import org.dspace.orcid.OrcidQueue;
@@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
     @Autowired(required = true)
     protected IdentifierService identifierService;
     @Autowired(required = true)
+    protected DOIService doiService;
+    @Autowired(required = true)
     protected VersioningService versioningService;
     @Autowired(required = true)
     protected HarvestedItemService harvestedItemService;
@@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
         // Remove any Handle
         handleService.unbindHandle(context, item);

+        // Delete a DOI if linked to the item.
+        // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid,
+        // hibernate will throw a foreign constraint exception.
+        // Here we use the DOI service directly as it is able to manage DOIs even without any configured
+        // consumer or provider.
+        DOI doi = doiService.findDOIByDSpaceObject(context, item);
+        if (doi != null) {
+            doi.setDSpaceObject(null);
+        }
+
         // remove version attached to the item
         removeVersion(context, item);

View File

@@ -15,6 +15,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.stream.Collectors;

 import org.apache.commons.lang3.StringUtils;
 import org.apache.logging.log4j.Logger;
@@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue;
 import org.dspace.content.authority.service.ChoiceAuthorityService;
 import org.dspace.core.Utils;
 import org.dspace.core.service.PluginService;
+import org.dspace.discovery.configuration.DiscoveryConfigurationService;
+import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
 import org.dspace.services.ConfigurationService;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
     protected Map<String, Map<String, List<String>>> authoritiesFormDefinitions =
             new HashMap<String, Map<String, List<String>>>();

+    // Map of vocabulary authorities to and their index info equivalent
+    protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>();
+
     // the item submission reader
     private SubmissionConfigReader itemSubmissionConfigReader;
@@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
     protected ConfigurationService configurationService;
     @Autowired(required = true)
     protected PluginService pluginService;
+    @Autowired
+    private DiscoveryConfigurationService searchConfigurationService;

     final static String CHOICES_PLUGIN_PREFIX = "choices.plugin.";
     final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation.";
@@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
         HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
         return ma.getParentChoice(authorityName, vocabularyId, locale);
     }
+
+    @Override
+    public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) {
+        if (this.vocabularyIndexMap.containsKey(nameVocab)) {
+            return this.vocabularyIndexMap.get(nameVocab);
+        } else {
+            init();
+            ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab);
+            if (source != null && source instanceof DSpaceControlledVocabulary) {
+                Set<String> metadataFields = new HashSet<>();
+                Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab);
+                for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) {
+                    metadataFields.addAll(formToField.getValue().stream().map(value ->
+                        StringUtils.replace(value, "_", "."))
+                        .collect(Collectors.toList()));
+                }
+                DiscoverySearchFilterFacet matchingFacet = null;
+                for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) {
+                    boolean coversAllFieldsFromVocab = true;
+                    for (String fieldFromVocab: metadataFields) {
+                        boolean coversFieldFromVocab = false;
+                        for (String facetMdField: facetConfig.getMetadataFields()) {
+                            if (facetMdField.startsWith(fieldFromVocab)) {
+                                coversFieldFromVocab = true;
+                                break;
+                            }
+                        }
+                        if (!coversFieldFromVocab) {
+                            coversAllFieldsFromVocab = false;
+                            break;
+                        }
+                    }
+                    if (coversAllFieldsFromVocab) {
+                        matchingFacet = facetConfig;
+                        break;
+                    }
+                }
+                DSpaceControlledVocabularyIndex vocabularyIndex =
+                    new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields,
+                        matchingFacet);
+                this.vocabularyIndexMap.put(nameVocab, vocabularyIndex);
+                return vocabularyIndex;
+            }
+            return null;
+        }
+    }
 }

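The facet-matching loop inside getVocabularyIndex() picks the first configured facet that covers the vocabulary: a facet qualifies when, for every metadata field bound to the vocabulary, at least one of the facet's metadata fields starts with it. The same rule restated on plain strings:

import java.util.List;
import java.util.Set;

public class FacetCoverageCheck {
    // A facet covers a vocabulary when every vocabulary field is a prefix of
    // at least one facet field, e.g. "dc.subject" covers "dc.subject.other".
    static boolean covers(Set<String> vocabularyFields, List<String> facetFields) {
        return vocabularyFields.stream().allMatch(field ->
            facetFields.stream().anyMatch(facetField -> facetField.startsWith(field)));
    }

    public static void main(String[] args) {
        System.out.println(covers(Set.of("dc.subject"), List.of("dc.subject.other")));      // true
        System.out.println(covers(Set.of("dc.subject", "dc.type"), List.of("dc.subject"))); // false
    }
}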
View File

@@ -0,0 +1,47 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.content.authority;
+
+import java.util.Set;
+
+import org.dspace.browse.BrowseIndex;
+import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
+
+/**
+ * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a
+ * {@code BrowseIndexRest}
+ * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)}
+ *
+ * @author Marie Verdonck (Atmire) on 04/05/2023
+ */
+public class DSpaceControlledVocabularyIndex extends BrowseIndex {
+
+    protected DSpaceControlledVocabulary vocabulary;
+    protected Set<String> metadataFields;
+    protected DiscoverySearchFilterFacet facetConfig;
+
+    public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set<String> metadataFields,
+                                           DiscoverySearchFilterFacet facetConfig) {
+        super(controlledVocabulary.vocabularyName);
+        this.vocabulary = controlledVocabulary;
+        this.metadataFields = metadataFields;
+        this.facetConfig = facetConfig;
+    }
+
+    public DSpaceControlledVocabulary getVocabulary() {
+        return vocabulary;
+    }
+
+    public Set<String> getMetadataFields() {
+        return this.metadataFields;
+    }
+
+    public DiscoverySearchFilterFacet getFacetConfig() {
+        return this.facetConfig;
+    }
+}

View File

@@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue;
 import org.dspace.content.authority.Choice;
 import org.dspace.content.authority.ChoiceAuthority;
 import org.dspace.content.authority.Choices;
+import org.dspace.content.authority.DSpaceControlledVocabularyIndex;

 /**
  * Broker for ChoiceAuthority plugins, and for other information configured
@@ -220,4 +221,7 @@ public interface ChoiceAuthorityService {
      * @return the parent Choice object if any
      */
     public Choice getParentChoice(String authorityName, String vocabularyId, String locale);
+
+    public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab);
+
 }

View File

@@ -14,6 +14,7 @@ import java.util.List;
 import org.dspace.content.ProcessStatus;
 import org.dspace.core.Context;
 import org.dspace.core.GenericDAO;
+import org.dspace.eperson.EPerson;
 import org.dspace.scripts.Process;
 import org.dspace.scripts.ProcessQueryParameterContainer;
@@ -97,4 +98,26 @@ public interface ProcessDAO extends GenericDAO<Process> {
     List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
         throws SQLException;

+    /**
+     * Returns a list of all Process objects in the database by the given user.
+     *
+     * @param context The relevant DSpace context
+     * @param user    The user to search for
+     * @param limit   The limit for the amount of Processes returned
+     * @param offset  The offset for the Processes to be returned
+     * @return The list of all Process objects in the Database
+     * @throws SQLException If something goes wrong
+     */
+    List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;
+
+    /**
+     * Count all the processes which are related to the given user.
+     *
+     * @param context The relevant DSpace context
+     * @param user    The user to search for
+     * @return The number of results matching the query
+     * @throws SQLException If something goes wrong
+     */
+    int countByUser(Context context, EPerson user) throws SQLException;
+
 }

View File

@@ -24,6 +24,7 @@ import org.dspace.content.ProcessStatus;
 import org.dspace.content.dao.ProcessDAO;
 import org.dspace.core.AbstractHibernateDAO;
 import org.dspace.core.Context;
+import org.dspace.eperson.EPerson;
 import org.dspace.scripts.Process;
 import org.dspace.scripts.ProcessQueryParameterContainer;
 import org.dspace.scripts.Process_;
@@ -168,6 +169,33 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements Pro
         return list(context, criteriaQuery, false, Process.class, -1, -1);
     }

+    @Override
+    public List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException {
+        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
+        CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
+        Root<Process> processRoot = criteriaQuery.from(Process.class);
+        criteriaQuery.select(processRoot);
+        criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user));
+
+        List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
+        orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID)));
+        criteriaQuery.orderBy(orderList);
+
+        return list(context, criteriaQuery, false, Process.class, limit, offset);
+    }
+
+    @Override
+    public int countByUser(Context context, EPerson user) throws SQLException {
+        CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
+        CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);
+        Root<Process> processRoot = criteriaQuery.from(Process.class);
+        criteriaQuery.select(processRoot);
+        criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user));
+
+        return count(context, criteriaQuery, criteriaBuilder, processRoot);
+    }
+
 }

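Together the two DAO methods support a paged "my processes" view: countByUser() sizes the pager, findByUser() fetches one page, newest first (it orders by PROCESS_ID descending). A hedged sketch of the call pattern (the page size of 20 is illustrative):

import java.sql.SQLException;
import java.util.List;

import org.dspace.content.dao.ProcessDAO;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.Process;

public class ProcessPage {
    // Fetch page `pageIndex` of the given user's processes.
    public static List<Process> page(ProcessDAO processDAO, Context context,
                                     EPerson user, int pageIndex) throws SQLException {
        int pageSize = 20;
        int total = processDAO.countByUser(context, user); // e.g. for "page X of N" labels
        System.out.println("total processes for user: " + total);
        return processDAO.findByUser(context, user, pageSize, pageIndex * pageSize);
    }
}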
View File

@@ -455,4 +455,18 @@ public interface CollectionService
     public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType)
         throws SQLException, SearchServiceException;

+    /**
+     * Returns a list of all collections for a specific entity type.
+     * NOTE: for better performance, this method retrieves its results from an index (cache)
+     * and does not query the database directly.
+     * This means that results may be stale or outdated until
+     * https://github.com/DSpace/DSpace/issues/2853 is resolved.
+     *
+     * @param context    DSpace Context
+     * @param entityType limit the returned collection to those related to given entity type
+     * @return list of collections found
+     * @throws SearchServiceException if search error
+     */
+    public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
+        throws SearchServiceException;
+
 }

View File

@@ -8,12 +8,15 @@
 package org.dspace.curate;

 import java.sql.SQLException;
+import java.util.List;

 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.content.DSpaceObject;
 import org.dspace.core.Context;
+import org.dspace.handle.factory.HandleServiceFactory;
+import org.dspace.handle.service.HandleService;
+import org.dspace.scripts.DSpaceCommandLineParameter;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link Curation} script
@@ -22,9 +25,6 @@ import org.springframework.beans.factory.annotation.Autowired;
  */
 public class CurationScriptConfiguration<T extends Curation> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -38,16 +38,37 @@ public class CurationScriptConfiguration<T extends Curation> extends ScriptConfi
     }

     /**
-     * Only admin can run Curation script via the scripts and processes endpoints.
-     * @param context The relevant DSpace context
-     * @return True if currentUser is admin, otherwise false
+     * Only repository admins or admins of the target object can run Curation script via the scripts
+     * and processes endpoints.
+     *
+     * @param context The relevant DSpace context
+     * @param commandLineParameters the parameters that will be used to start the process if known,
+     *                              <code>null</code> otherwise
+     * @return true if the currentUser is allowed to run the script with the specified parameters or
+     *         at least in some case if the parameters are not yet known
      */
     @Override
-    public boolean isAllowedToExecute(Context context) {
+    public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
         try {
-            return authorizeService.isAdmin(context);
+            if (commandLineParameters == null) {
+                return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
+                    || authorizeService.isItemAdmin(context);
+            } else if (commandLineParameters.stream()
+                    .map(DSpaceCommandLineParameter::getName)
+                    .noneMatch("-i"::equals)) {
+                return authorizeService.isAdmin(context);
+            } else {
+                String dspaceObjectID = commandLineParameters.stream()
+                    .filter(parameter -> "-i".equals(parameter.getName()))
+                    .map(DSpaceCommandLineParameter::getValue)
+                    .findFirst()
+                    .get();
+                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
+                DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID);
+                return authorizeService.isAdmin(context, dso);
+            }
         } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+            throw new RuntimeException(e);
         }
     }

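The rewritten override distinguishes three situations: parameters not yet known (any of the three admin roles may see the script as runnable), a run without -i (site-wide, so only full admins), and a run scoped by -i <handle> (admin rights on that one object suffice). A hedged sketch of the three branches from a caller's perspective; the task name, handle, and the two-argument DSpaceCommandLineParameter constructor are assumptions for illustration:

import java.util.List;

import org.dspace.core.Context;
import org.dspace.curate.Curation;
import org.dspace.curate.CurationScriptConfiguration;
import org.dspace.scripts.DSpaceCommandLineParameter;

public class CurationAuthorizationDemo {
    public static void demo(CurationScriptConfiguration<Curation> config, Context context) {
        // 1) Parameters unknown (e.g. listing runnable scripts): repository,
        //    community/collection, or item admins all qualify.
        System.out.println(config.isAllowedToExecute(context, null));

        // 2) No "-i" parameter: the task would run site-wide, full admin required.
        System.out.println(config.isAllowedToExecute(context,
            List.of(new DSpaceCommandLineParameter("-t", "vscan"))));

        // 3) "-i <handle>": being admin of the referenced object is enough.
        System.out.println(config.isAllowedToExecute(context,
            List.of(new DSpaceCommandLineParameter("-i", "123456789/4"),
                    new DSpaceCommandLineParameter("-t", "vscan"))));
    }
}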
View File

@@ -7,22 +7,14 @@
  */
 package org.dspace.discovery;

-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;

 /**
  * The {@link ScriptConfiguration} for the {@link IndexClient} script
  */
 public class IndexDiscoveryScriptConfiguration<T extends IndexClient> extends ScriptConfiguration<T> {

-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;

     @Override
@@ -30,15 +22,6 @@ public class IndexDiscoveryScriptConfiguration<T extends IndexClient> extends Sc
         return dspaceRunnableClass;
     }

-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {

View File

@@ -18,6 +18,9 @@ import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder; import org.dspace.discovery.utils.DiscoverQueryBuilder;
@@ -73,35 +76,80 @@ public class SearchUtils {
searchService = null; searchService = null;
} }
/**
* Retrieves the Discovery Configuration for a null context, prefix and DSpace object.
* This will result in returning the default configuration
* @return the default configuration
*/
public static DiscoveryConfiguration getDiscoveryConfiguration() { public static DiscoveryConfiguration getDiscoveryConfiguration() {
return getDiscoveryConfiguration(null, null); return getDiscoveryConfiguration(null, null, null);
} }
public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { /**
return getDiscoveryConfiguration(null, dso); * Retrieves the Discovery Configuration with a null prefix for a DSpace object.
* @param context
* the dabase context
* @param dso
* the DSpace object
* @return the Discovery Configuration for the specified DSpace object
*/
public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) {
return getDiscoveryConfiguration(context, null, dso);
} }
/** /**
* Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A
* null prefix mean the normal query, other predefined values are workspace or workflow * null prefix mean the normal query, other predefined values are workspace or workflow
* *
*
* @param context
* the database context
* @param prefix * @param prefix
* the namespace of the configuration to lookup if any * the namespace of the configuration to lookup if any
* @param dso * @param dso
* the DSpaceObject * the DSpaceObject
* @return the discovery configuration for the specified scope * @return the discovery configuration for the specified scope
*/ */
public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix,
DSpaceObject dso) {
if (prefix != null) { if (prefix != null) {
return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix);
} else { } else {
return getDiscoveryConfigurationByName(dso != null ? dso.getHandle() : null); return getDiscoveryConfigurationByDSO(context, dso);
} }
} }
/**
* Retrieve the configuration for the current dspace object and all its parents and add it to the provided set
* @param context - The database context
* @param configurations - The set of configurations to add the retrieved configurations to
* @param prefix - The namespace of the configuration to lookup if any
* @param dso - The DSpace Object
* @return the set of configurations with additional retrieved ones for the dspace object and parents
* @throws SQLException
*/
public static Set<DiscoveryConfiguration> addDiscoveryConfigurationForParents(
Context context, Set<DiscoveryConfiguration> configurations, String prefix, DSpaceObject dso)
throws SQLException {
if (dso == null) {
configurations.add(getDiscoveryConfigurationByName(null));
return configurations;
}
if (prefix != null) {
configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle()));
} else {
configurations.add(getDiscoveryConfigurationByName(dso.getHandle()));
}
DSpaceObjectService<DSpaceObject> dSpaceObjectService = ContentServiceFactory.getInstance()
.getDSpaceObjectService(dso);
DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso);
return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject);
}
/** /**
* Return the discovery configuration identified by the specified name * Return the discovery configuration identified by the specified name
* *
* @param configurationName the configuration name assigned to the bean in the * @param configurationName the configuration name assigned to the bean in the
* discovery.xml * discovery.xml
* @return the discovery configuration * @return the discovery configuration
@@ -113,6 +161,18 @@ public class SearchUtils {
return configurationService.getDiscoveryConfiguration(configurationName); return configurationService.getDiscoveryConfiguration(configurationName);
} }
/**
* Return the discovery configuration for the provided DSO
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO
*/
public static DiscoveryConfiguration getDiscoveryConfigurationByDSO(
Context context, DSpaceObject dso) {
DiscoveryConfigurationService configurationService = getConfigurationService();
return configurationService.getDiscoveryDSOConfiguration(context, dso);
}
public static DiscoveryConfigurationService getConfigurationService() { public static DiscoveryConfigurationService getConfigurationService() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager return manager
@@ -127,47 +187,55 @@ public class SearchUtils {
* Method that retrieves a list of all the configuration objects from the given item * Method that retrieves a list of all the configuration objects from the given item
* A configuration object can be returned for each parent community/collection * A configuration object can be returned for each parent community/collection
* *
* @param context the database context
* @param item the DSpace item * @param item the DSpace item
* @return a list of configuration objects * @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Item item) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Context context, Item item)
throws SQLException {
List<Collection> collections = item.getCollections(); List<Collection> collections = item.getCollections();
return getAllDiscoveryConfigurations(null, collections, item); return getAllDiscoveryConfigurations(context, null, collections, item);
} }
/** /**
* Return all the discovery configurations applicable to the provided workspace item * Return all the discovery configurations applicable to the provided workspace item
*
* @param context the database context
* @param witem a workspace item * @param witem a workspace item
* @return a list of discovery configuration * @return a list of discovery configuration
* @throws SQLException * @throws SQLException
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkspaceItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>(); List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection()); collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem());
} }
/** /**
* Return all the discovery configurations applicable to the provided workflow item * Return all the discovery configurations applicable to the provided workflow item
*
* @param context the database context
* @param witem a workflow item * @param witem a workflow item
* @return a list of discovery configuration * @return a list of discovery configuration
* @throws SQLException * @throws SQLException
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkflowItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>(); List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection()); collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem());
} }
private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(String prefix, private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
String prefix,
List<Collection> collections, Item item) List<Collection> collections, Item item)
throws SQLException { throws SQLException {
Set<DiscoveryConfiguration> result = new HashSet<>(); Set<DiscoveryConfiguration> result = new HashSet<>();
for (Collection collection : collections) { for (Collection collection : collections) {
DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); addDiscoveryConfigurationForParents(context, result, prefix, collection);
result.add(configuration);
} }
//Add alwaysIndex configurations //Add alwaysIndex configurations

View File

@@ -53,10 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
if (bitstreams != null) { if (bitstreams != null) {
for (Bitstream bitstream : bitstreams) { for (Bitstream bitstream : bitstreams) {
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
// Add _keyword and _filter fields which are necessary to support filtering and faceting
// for the file names
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
String description = bitstream.getDescription(); String description = bitstream.getDescription();
if ((description != null) && !description.isEmpty()) { if ((description != null) && !description.isEmpty()) {
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
// Add _keyword and _filter fields which are necessary to support filtering and
// faceting for the descriptions
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
description);
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
description);
} }
} }
} }
@@ -65,4 +75,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
} }
} }
} }
} }
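A hedged sketch of why the extra copies matter on the query side: with the `_filter` and `_keyword` variants indexed, a SolrJ client can filter and facet on file names, which the plain stored field did not support. The field name constant is the plugin's own; the query values are made up for illustration:

    import org.apache.solr.client.solrj.SolrQuery;

    SolrQuery query = new SolrQuery("*:*");
    // exact-match filtering on a file name via the new _filter copy
    query.addFilterQuery(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter:\"report.pdf\"");
    // faceting on file names via the new _keyword copy
    query.addFacetField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword");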

View File

@@ -7,12 +7,23 @@
*/ */
package org.dspace.discovery.configuration; package org.dspace.discovery.configuration;
import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
@@ -22,9 +33,18 @@ import org.dspace.services.factory.DSpaceServicesFactory;
*/ */
public class DiscoveryConfigurationService { public class DiscoveryConfigurationService {
private static final Logger log = LogManager.getLogger();
private Map<String, DiscoveryConfiguration> map; private Map<String, DiscoveryConfiguration> map;
private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>(); private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>();
/**
* Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its
* own configuration, we take the one of the first parent that does.
* This cache ensures we do not have to go up the hierarchy every time.
*/
private final Map<UUID, DiscoveryConfiguration> comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>();
public Map<String, DiscoveryConfiguration> getMap() { public Map<String, DiscoveryConfiguration> getMap() {
return map; return map;
} }
@@ -41,25 +61,98 @@ public class DiscoveryConfigurationService {
this.toIgnoreMetadataFields = toIgnoreMetadataFields; this.toIgnoreMetadataFields = toIgnoreMetadataFields;
} }
public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { /**
* Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from
* the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will
* be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default
* configuration will be retrieved.
*
* When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param indexableObject - The IndexableObject to retrieve the configuration for
* @return the discovery configuration for the provided IndexableObject.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) {
String name; String name;
if (dso == null) { if (indexableObject == null) {
name = "default"; return getDiscoveryConfiguration(null);
} else if (dso instanceof IndexableDSpaceObject) { } else if (indexableObject instanceof IndexableDSpaceObject) {
name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject());
} else { } else {
name = dso.getUniqueIndexID(); name = indexableObject.getUniqueIndexID();
} }
return getDiscoveryConfiguration(name); return getDiscoveryConfiguration(name);
} }
public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { /**
* Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO.
*/
public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) {
// Fall back to default configuration
if (dso == null) {
return getDiscoveryConfiguration(null, true);
}
// Attempt to retrieve cached configuration by UUID
if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) {
return comColToDiscoveryConfigurationMap.get(dso.getID());
}
DiscoveryConfiguration configuration;
// Attempt to retrieve configuration by DSO handle
configuration = getDiscoveryConfiguration(dso.getHandle(), false);
if (configuration == null) {
// Recurse up the Comm/Coll hierarchy until a configuration is found
DSpaceObjectService<DSpaceObject> dSpaceObjectService =
ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
DSpaceObject parentObject = null;
try {
parentObject = dSpaceObjectService.getParentObject(context, dso);
} catch (SQLException e) {
log.error(e);
}
configuration = getDiscoveryDSOConfiguration(context, parentObject);
}
// Cache the resulting configuration when the DSO is a Community or Collection
if (dso instanceof Community || dso instanceof Collection) {
comColToDiscoveryConfigurationMap.put(dso.getID(), configuration);
}
return configuration;
}
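To make the recursion above concrete, a hedged walk-through for a collection under a single community; the handles are invented for illustration:

    // Lookup order implemented by getDiscoveryDSOConfiguration(context, collection),
    // assuming collection handle 123456789/42 inside community 123456789/1:
    //   1. cache: comColToDiscoveryConfigurationMap.get(collection UUID) -> hit? return it
    //   2. by handle: getDiscoveryConfiguration("123456789/42", false)   -> bean if defined
    //   3. recurse:   getDiscoveryDSOConfiguration(context, community)   -> "123456789/1"
    //   4. no parent left (dso == null)                                  -> "default" bean
    // The resolved configuration is then cached against the collection's UUID.
    DiscoveryConfiguration conf = getDiscoveryDSOConfiguration(context, collection);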
/**
* Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the
* default configuration will be returned.
* @param name - The name of the configuration to be retrieved
* @return the Discovery Configuration for the provided name, or default when none was found.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(String name) {
return getDiscoveryConfiguration(name, true);
}
/**
* Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration
* will be returned when no match is found. When useDefault is set to false, null will be returned when no match is
* found.
* @param name - The name of the configuration to retrieve
* @param useDefault - Whether the default configuration should be used when no match is found
* @return the configuration for the provided name
*/
public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) {
DiscoveryConfiguration result; DiscoveryConfiguration result;
result = StringUtils.isBlank(name) ? null : getMap().get(name); result = StringUtils.isBlank(name) ? null : getMap().get(name);
if (result == null) { if (result == null && useDefault) {
//No specific configuration, get the default one //No specific configuration, get the default one
result = getMap().get("default"); result = getMap().get("default");
} }
@@ -67,12 +160,23 @@ public class DiscoveryConfigurationService {
return result; return result;
} }
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, /**
final IndexableObject dso) { * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be
* checked for the provided name. When no match is found for the name, the configuration will be retrieved for the
* IndexableObject
*
* @param context - The database context
* @param configurationName - The name of the configuration to be retrieved
* @param indexableObject - The indexable object to retrieve the configuration for
* @return the Discovery configuration for the provided name or, when no match is found, for the provided IndexableObject
*/
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context,
String configurationName,
IndexableObject indexableObject) {
if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) {
return getMap().get(configurationName); return getMap().get(configurationName);
} else { } else {
return getDiscoveryConfiguration(dso); return getDiscoveryConfiguration(context, indexableObject);
} }
} }
@@ -92,13 +196,25 @@ public class DiscoveryConfigurationService {
return configs; return configs;
} }
/**
* @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet}
*/
public List<DiscoverySearchFilterFacet> getAllFacetsConfig() {
List<DiscoverySearchFilterFacet> configs = new ArrayList<>();
for (String key : map.keySet()) {
DiscoveryConfiguration config = map.get(key);
configs.addAll(config.getSidebarFacets());
}
return configs;
}
public static void main(String[] args) { public static void main(String[] args) {
System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size());
DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName( .getServiceByName(
DiscoveryConfigurationService.class DiscoveryConfigurationService.class
.getName(), .getName(),
DiscoveryConfigurationService.class); DiscoveryConfigurationService.class);
for (String key : mainService.getMap().keySet()) { for (String key : mainService.getMap().keySet()) {
System.out.println(key); System.out.println(key);
@@ -126,7 +242,7 @@ public class DiscoveryConfigurationService {
System.out.println("Recent submissions configuration:"); System.out.println("Recent submissions configuration:");
DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration
.getRecentSubmissionConfiguration(); .getRecentSubmissionConfiguration();
System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField());
System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax()); System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax());

View File

@@ -9,6 +9,7 @@ package org.dspace.discovery.configuration;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.annotation.Nullable;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -22,6 +23,11 @@ public class DiscoverySortConfiguration {
private List<DiscoverySortFieldConfiguration> sortFields = new ArrayList<DiscoverySortFieldConfiguration>(); private List<DiscoverySortFieldConfiguration> sortFields = new ArrayList<DiscoverySortFieldConfiguration>();
/**
* Default sort configuration to use when needed
*/
@Nullable private DiscoverySortFieldConfiguration defaultSortField;
public List<DiscoverySortFieldConfiguration> getSortFields() { public List<DiscoverySortFieldConfiguration> getSortFields() {
return sortFields; return sortFields;
} }
@@ -30,6 +36,14 @@ public class DiscoverySortConfiguration {
this.sortFields = sortFields; this.sortFields = sortFields;
} }
public DiscoverySortFieldConfiguration getDefaultSortField() {
return defaultSortField;
}
public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) {
this.defaultSortField = configuration;
}
public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) {
if (StringUtils.isBlank(sortField)) { if (StringUtils.isBlank(sortField)) {
return null; return null;
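A hedged sketch of setting the new default sort field programmatically; in practice this would be wired in the discovery.xml Spring configuration, and the DiscoverySortFieldConfiguration setter name is an assumption mirrored from the getters used elsewhere in this change set:

    // Assumed setter (mirroring getMetadataField() used in DiscoverQueryBuilder):
    DiscoverySortFieldConfiguration lastModified = new DiscoverySortFieldConfiguration();
    lastModified.setMetadataField("lastModified");

    DiscoverySortConfiguration sortConfiguration = new DiscoverySortConfiguration();
    sortConfiguration.setDefaultSortField(lastModified);   // used when no explicit sort is requested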

View File

@@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
final Collection collection = indexableCollection.getIndexedObject(); final Collection collection = indexableCollection.getIndexedObject();
// Retrieve configuration // Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration(); .getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>(); List<String> highlightedMetadataFields = new ArrayList<>();
@@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
return locations; return locations;
} }
} }

View File

@@ -69,7 +69,7 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
final Community community = indexableObject.getIndexedObject(); final Community community = indexableObject.getIndexedObject();
// Retrieve configuration // Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration(); .getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>(); List<String> highlightedMetadataFields = new ArrayList<>();
@@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
return locations; return locations;
} }
} }

View File

@@ -80,11 +80,13 @@ public abstract class InprogressSubmissionIndexFactoryImpl
// Add item metadata // Add item metadata
List<DiscoveryConfiguration> discoveryConfigurations; List<DiscoveryConfiguration> discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) { if (inProgressSubmission instanceof WorkflowItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) { } else if (inProgressSubmission instanceof WorkspaceItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkspaceItem) inProgressSubmission);
} else { } else {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
} }
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
indexableCollectionService.storeCommunityCollectionLocations(doc, locations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations);

View File

@@ -160,7 +160,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
} }
// Add the item metadata // Add the item metadata
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations);
//mandatory facet to show status on mydspace //mandatory facet to show status on mydspace

View File

@@ -64,6 +64,7 @@ public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl<IndexableMet
Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
// add read permission on doc for anonymous group // add read permission on doc for anonymous group
doc.addField("read", "g" + anonymousGroup.getID()); doc.addField("read", "g" + anonymousGroup.getID());
doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName);
return doc; return doc;
} }

View File

@@ -332,7 +332,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
} }
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (searchSortConfiguration.getDefaultSortField() != null) {
sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
} }
@@ -342,7 +344,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) {
String sortBy;// Attempt to find the default one, if none found we use SCORE String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score"; sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (searchSortConfiguration.getDefaultSortField() != null) {
sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) { if (StringUtils.isBlank(defaultSort.getMetadataField())) {
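A minimal standalone restatement of the precedence the two methods above now implement, using only accessors visible in this diff; a sketch, not the shipped code:

    private String resolveDefaultSortField(DiscoverySortConfiguration cfg) {
        if (cfg.getDefaultSortField() != null) {
            return cfg.getDefaultSortField().getMetadataField();      // explicit default wins
        }
        if (cfg.getSortFields() != null && !cfg.getSortFields().isEmpty()) {
            return cfg.getSortFields().get(0).getMetadataField();     // first configured field
        }
        return "score";                                               // relevance fallback
    }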

View File

@@ -15,8 +15,8 @@ import java.util.Date;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.DCDate;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
@@ -107,28 +107,30 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
LinkedList<MetadatumDTO> dayList = (LinkedList<MetadatumDTO>) day.contributeMetadata(t); LinkedList<MetadatumDTO> dayList = (LinkedList<MetadatumDTO>) day.contributeMetadata(t);
for (int i = 0; i < yearList.size(); i++) { for (int i = 0; i < yearList.size(); i++) {
DCDate dcDate = null; String resultDateString = "";
String dateString = ""; String dateString = "";
SimpleDateFormat resultFormatter = null;
if (monthList.size() > i && dayList.size() > i) { if (monthList.size() > i && dayList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() +
"-" + dayList.get(i).getValue(); "-" + dayList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM-dd");
} else if (monthList.size() > i) { } else if (monthList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM");
} else { } else {
dateString = yearList.get(i).getValue(); dateString = yearList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy");
} }
int j = 0; int j = 0;
// Use the first dcDate that has been formatted (Config should go from most specific to most lenient) // Use the first dcDate that has been formatted (Config should go from most specific to most lenient)
while (j < dateFormatsToAttempt.size()) { while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) {
String dateFormat = dateFormatsToAttempt.get(j); String dateFormat = dateFormatsToAttempt.get(j);
try { try {
SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
Date date = formatter.parse(dateString); Date date = formatter.parse(dateString);
dcDate = new DCDate(date); resultDateString = resultFormatter.format(date);
values.add(metadataFieldMapping.toDCValue(field, formatter.format(date)));
break;
} catch (ParseException e) { } catch (ParseException e) {
// Multiple dateformats can be configured, we don't want to print the entire stacktrace every // Multiple dateformats can be configured, we don't want to print the entire stacktrace every
// time one of those formats fails. // time one of those formats fails.
@@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
} }
j++; j++;
} }
if (dcDate == null) { if (StringUtils.isNotBlank(resultDateString)) {
values.add(metadataFieldMapping.toDCValue(field, resultDateString));
} else {
log.info( log.info(
"Failed parsing " + dateString + ", check " + "Failed parsing " + dateString + ", check " +
"the configured dataformats in config/spring/api/pubmed-integration.xml"); "the configured dataformats in config/spring/api/pubmed-integration.xml");

View File

@@ -7,13 +7,8 @@
*/ */
package org.dspace.orcid.script; package org.dspace.orcid.script;
import java.sql.SQLException;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Script configuration for {@link OrcidBulkPush}. * Script configuration for {@link OrcidBulkPush}.
@@ -24,20 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired;
*/ */
public class OrcidBulkPushScriptConfiguration<T extends OrcidBulkPush> extends ScriptConfiguration<T> { public class OrcidBulkPushScriptConfiguration<T extends OrcidBulkPush> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass; private Class<T> dspaceRunnableClass;
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override @Override
public Class<T> getDspaceRunnableClass() { public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass; return dspaceRunnableClass;

View File

@@ -129,6 +129,11 @@ public class ProcessServiceImpl implements ProcessService {
return processes; return processes;
} }
@Override
public List<Process> findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException {
return processDAO.findByUser(context, eperson, limit, offset);
}
@Override @Override
public void start(Context context, Process process) throws SQLException { public void start(Context context, Process process) throws SQLException {
process.setProcessStatus(ProcessStatus.RUNNING); process.setProcessStatus(ProcessStatus.RUNNING);
@@ -311,6 +316,11 @@ public class ProcessServiceImpl implements ProcessService {
return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date);
} }
@Override
public int countByUser(Context context, EPerson user) throws SQLException {
return processDAO.countByUser(context, user);
}
private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();

View File

@@ -37,7 +37,7 @@ public class ScriptServiceImpl implements ScriptService {
@Override @Override
public List<ScriptConfiguration> getScriptConfigurations(Context context) { public List<ScriptConfiguration> getScriptConfigurations(Context context) {
return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter(
scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null))
.sorted(Comparator.comparing(ScriptConfiguration::getName)) .sorted(Comparator.comparing(ScriptConfiguration::getName))
.collect(Collectors.toList()); .collect(Collectors.toList());
} }

View File

@@ -7,17 +7,28 @@
*/ */
package org.dspace.scripts.configuration; package org.dspace.scripts.configuration;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.DSpaceRunnable;
import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.BeanNameAware;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this
* and represent a script's configuration * and represent a script's configuration.
* By default scripts are available only to repository administrators; scripts that have a broader audience
* must override the {@link #isAllowedToExecute(Context, List)} method.
*/ */
public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements BeanNameAware { public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements BeanNameAware {
@Autowired
protected AuthorizeService authorizeService;
/** /**
* The possible options for this script * The possible options for this script
*/ */
@@ -70,14 +81,23 @@ public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements B
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration
*/ */
public abstract void setDspaceRunnableClass(Class<T> dspaceRunnableClass); public abstract void setDspaceRunnableClass(Class<T> dspaceRunnableClass);
/** /**
* This method returns whether the script is allowed to execute in the given context. By default this requires * This method returns whether the script is allowed to execute in the given context. By default this requires
* the current user in the context to be an admin, however each script can override this individually * the current user in the context to be an admin, however each script can override this individually
* if different rules apply * if different rules apply
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param commandLineParameters the parameters that will be used to start the process if known,
* <code>null</code> otherwise
* @return A boolean indicating whether the script is allowed to execute or not * @return A boolean indicating whether the script is allowed to execute or not
*/ */
public abstract boolean isAllowedToExecute(Context context); public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
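With the admin-only check now living in the base class, a script aimed at a broader audience simply overrides the method. A hedged sketch; the class and its access rule are invented for illustration:

    // Hypothetical configuration whose script may be run by any logged-in user:
    public class ExampleScriptConfiguration<T extends DSpaceRunnable> extends ScriptConfiguration<T> {

        private Class<T> dspaceRunnableClass;

        @Override
        public Class<T> getDspaceRunnableClass() {
            return dspaceRunnableClass;
        }

        @Override
        public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
            this.dspaceRunnableClass = dspaceRunnableClass;
        }

        @Override
        public boolean isAllowedToExecute(Context context,
                                          List<DSpaceCommandLineParameter> commandLineParameters) {
            return context.getCurrentUser() != null;   // broader than the admin-only default
        }
    }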
/** /**
* The getter for the options of the Script * The getter for the options of the Script

View File

@@ -255,4 +255,26 @@ public interface ProcessService {
*/ */
List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date) List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
throws SQLException; throws SQLException;
/**
* Returns a list of all Process objects in the database created by the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @param limit The limit for the amount of Processes returned
* @param offset The offset for the Processes to be returned
* @return The list of Process objects in the database created by the given user
* @throws SQLException If something goes wrong
*/
List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;
/**
* Count all the processes related to the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @return The number of results matching the query
* @throws SQLException If something goes wrong
*/
int countByUser(Context context, EPerson user) throws SQLException;
} }

View File

@@ -7,13 +7,8 @@
*/ */
package org.dspace.statistics.export; package org.dspace.statistics.export;
import java.sql.SQLException;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script
@@ -21,9 +16,6 @@ import org.springframework.beans.factory.annotation.Autowired;
public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedOpenUrlTracker> public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedOpenUrlTracker>
extends ScriptConfiguration<T> { extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass; private Class<T> dspaceRunnableClass;
@Override @Override
@@ -41,15 +33,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedO
this.dspaceRunnableClass = dspaceRunnableClass; this.dspaceRunnableClass = dspaceRunnableClass;
} }
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override @Override
public Options getOptions() { public Options getOptions() {
if (options == null) { if (options == null) {

View File

@@ -7,13 +7,8 @@
*/ */
package org.dspace.submit.migration; package org.dspace.submit.migration;
import java.sql.SQLException;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
public class SubmissionFormsMigrationCliScriptConfiguration<T extends SubmissionFormsMigration> public class SubmissionFormsMigrationCliScriptConfiguration<T extends SubmissionFormsMigration>
extends ScriptConfiguration<T> { extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass; private Class<T> dspaceRunnableClass;
@Override @Override
@@ -38,15 +30,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration<T extends Submission
this.dspaceRunnableClass = dspaceRunnableClass; this.dspaceRunnableClass = dspaceRunnableClass;
} }
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override @Override
public Options getOptions() { public Options getOptions() {
if (options == null) { if (options == null) {

View File

@@ -7,7 +7,12 @@
*/ */
package org.dspace.submit.migration; package org.dspace.submit.migration;
import java.util.List;
import org.apache.commons.cli.Options;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.configuration.ScriptConfiguration;
/** /**
* Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be used in rest/scripts.xml configuration so * Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be used in rest/scripts.xml configuration so
@@ -15,10 +20,37 @@ import org.dspace.core.Context;
* *
* @author Maria Verdonck (Atmire) on 05/01/2021 * @author Maria Verdonck (Atmire) on 05/01/2021
*/ */
public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration { public class SubmissionFormsMigrationScriptConfiguration<T extends SubmissionFormsMigration>
extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableClass;
@Override @Override
public boolean isAllowedToExecute(Context context) { public Class<T> getDspaceRunnableClass() {
return this.dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location");
options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location");
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
@Override
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
// Script is not allowed to be executed from REST side // Script is not allowed to be executed from REST side
return false; return false;
} }

View File

@@ -8,15 +8,11 @@
package org.dspace.subscriptions; package org.dspace.subscriptions;
import java.sql.SQLException;
import java.util.Objects; import java.util.Objects;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.AuthorizeServiceImpl;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them
@@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration<T
private Class<T> dspaceRunnableClass; private Class<T> dspaceRunnableClass;
@Autowired
private AuthorizeServiceImpl authorizeService;
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override @Override
public Options getOptions() { public Options getOptions() {
if (Objects.isNull(options)) { if (Objects.isNull(options)) {

View File

@@ -122,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper
org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided
org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks
org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long!
org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \
the repository

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemimport; package org.dspace.app.itemimport;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File; import java.io.File;
import java.nio.file.Files; import java.nio.file.Files;
@@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.flywaydb.core.internal.util.ExceptionUtils;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@@ -46,6 +48,7 @@ import org.junit.Test;
public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private static final String ZIP_NAME = "saf.zip"; private static final String ZIP_NAME = "saf.zip";
private static final String PDF_NAME = "test.pdf";
private static final String publicationTitle = "A Tale of Two Cities"; private static final String publicationTitle = "A Tale of Two Cities";
private static final String personTitle = "Person Test"; private static final String personTitle = "Person Test";
@@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private Collection collection; private Collection collection;
private Path tempDir; private Path tempDir;
private Path workDir; private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before @Before
@Override @Override
@@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkMetadata(); checkMetadata();
checkMetadataWithAnotherSchema(); checkMetadataWithAnotherSchema();
checkBitstream(); checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
} }
@Test @Test
@@ -254,6 +262,23 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkRelationship(); checkRelationship();
} }
@Test
public void importItemByZipSafInvalidMimetype() throws Exception {
// use sample PDF file
Files.copy(getClass().getResourceAsStream("test.pdf"),
Path.of(tempDir.toString() + "/" + PDF_NAME));
String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(),
"-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString()
+ "/mapfile.out" };
try {
perfomImportScript(args);
} catch (Exception e) {
// should throw an exception due to invalid mimetype
assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass());
}
}
@Test @Test
public void resumeImportItemBySafWithMetadataOnly() throws Exception { public void resumeImportItemBySafWithMetadataOnly() throws Exception {
// create simple SAF // create simple SAF

View File

@@ -162,8 +162,8 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
return (B) this; return (B) this;
} }
/** /**
* Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson.
* READ permissions will be removed * If another ADMIN policy is in place for the eperson, it will be replaced
* *
* @param dso * @param dso
* the DSpaceObject on which grant the permission * the DSpaceObject on which grant the permission

View File

@@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
private Community community; private Community community;
protected CommunityBuilder(Context context) { protected CommunityBuilder(Context context) {
super(context); super(context);
} }
public static CommunityBuilder createCommunity(final Context context) { public static CommunityBuilder createCommunity(final Context context) {
CommunityBuilder builder = new CommunityBuilder(context); CommunityBuilder builder = new CommunityBuilder(context);
return builder.create(); return builder.create(null);
}
public static CommunityBuilder createCommunity(final Context context, String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.create(handle);
} }
private CommunityBuilder create() { private CommunityBuilder create(String handle) {
return createSubCommunity(context, null); return createSubCommunity(context, null, handle);
} }
public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { public static CommunityBuilder createSubCommunity(final Context context, final Community parent) {
CommunityBuilder builder = new CommunityBuilder(context); CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent); return builder.createSub(parent, null);
} }
private CommunityBuilder createSub(final Community parent) { public static CommunityBuilder createSubCommunity(final Context context, final Community parent,
final String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent, handle);
}
private CommunityBuilder createSub(final Community parent, String handle) {
try { try {
community = communityService.create(parent, context); community = communityService.create(parent, context, handle);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
return null; return null;
@@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
@Override @Override
public Community build() { public Community build() {
try { try {
communityService.update(context, community); communityService.update(context, community);
context.dispatchEvents(); context.dispatchEvents();

View File

@@ -353,9 +353,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
} }
/** /**
* Create an admin group for the collection with the specified members * Assign the admin permission to the specified eperson
* *
* @param ePerson epersons to add to the admin group * @param ePerson the eperson that will get the ADMIN permission on the item
* @return this builder * @return this builder
* @throws SQLException * @throws SQLException
* @throws AuthorizeException * @throws AuthorizeException

View File

@@ -113,6 +113,9 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
} }
public static void deleteProcess(Integer integer) throws SQLException, IOException { public static void deleteProcess(Integer integer) throws SQLException, IOException {
if (integer == null) {
return;
}
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();
Process process = processService.find(c, integer); Process process = processService.find(c, integer);

View File

@@ -725,9 +725,6 @@ public class CollectionTest extends AbstractDSpaceObjectTest {
// Allow Item REMOVE perms // Allow Item REMOVE perms
doNothing().when(authorizeServiceSpy) doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE));
// Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion)
doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE));
// create & add item first // create & add item first
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();

View File

@@ -1189,8 +1189,6 @@ public class ItemTest extends AbstractDSpaceObjectTest {
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true);
// Allow Item DELETE perms // Allow Item DELETE perms
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE);
// Allow Item WRITE perms (required to first delete identifiers)
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE);
UUID id = item.getID(); UUID id = item.getID();
itemService.delete(context, item); itemService.delete(context, item);

View File

@@ -7,14 +7,18 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.AbstractIntegrationTestWithDatabase;
@@ -24,6 +28,7 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.WorkflowItemBuilder; import org.dspace.builder.WorkflowItemBuilder;
@@ -39,6 +44,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService; import org.dspace.content.service.WorkspaceItemService;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableClaimedTask;
import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.IndexableItem;
@@ -731,6 +738,64 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
} }
} }
/**
* Test designed to check whether the default sort option for Discovery is working, using the <code>workspace</code>
* DiscoveryConfiguration <br/>
* <b>Note</b>: this test will be skipped if <code>workspace</code> does not have a default sort option set with
* metadata type <code>dc_date_accessioned</code> or <code>lastModified</code>
* @throws SearchServiceException
*/
@Test
public void searchWithDefaultSortServiceTest() throws SearchServiceException {
DiscoveryConfiguration workspaceConf =
SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null);
// Skip if no default sort option set for workspaceConf
if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) {
return;
}
DiscoverySortFieldConfiguration defaultSortField =
workspaceConf.getSearchSortConfiguration().getDefaultSortField();
// Populate the testing objects: create items in eperson's workspace and perform search in it
int numberItems = 10;
context.turnOffAuthorisationSystem();
EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build();
context.setCurrentUser(submitter);
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItems; i++) {
ItemBuilder.createItem(context, collection)
.withTitle("item " + i)
.build();
}
context.restoreAuthSystemState();
// Build query with default parameters (except for workspaceConf)
DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder()
.buildQuery(context, new IndexableCollection(collection), workspaceConf, "", null, "Item", null, null,
null, null);
DiscoverResult result = searchService.search(context, discoverQuery);
/*
// code example for testing against sort by dc_date_accessioned
LinkedList<String> dc_date_accessioneds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getMetadata())
.map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned"))
.map(m -> m.getValue()).findFirst().orElse("")
)
.collect(Collectors.toCollection(LinkedList::new));
*/
LinkedList<String> lastModifieds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getLastModified().toString())
.collect(Collectors.toCollection(LinkedList::new));
assertFalse(lastModifieds.isEmpty());
// entries must be in non-increasing order; start at 0 so the first pair is checked as well
for (int i = 0; i < lastModifieds.size() - 1; i++) {
assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0);
}
}
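Date#toString() values do not sort chronologically (month names compare alphabetically), so the assertion above is only a loose check. A stricter variant, as a minimal sketch assuming Item#getLastModified() returns java.util.Date (which would also need the java.util.Date import):
List<Date> lastModifiedDates = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getLastModified())
.collect(Collectors.toList());
for (int i = 0; i < lastModifiedDates.size() - 1; i++) {
// Date implements Comparable; a non-negative result means non-increasing (newest first) order
assertTrue(lastModifiedDates.get(i).compareTo(lastModifiedDates.get(i + 1)) >= 0);
}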
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1); assertSearchQuery(resourceType, size, size, 0, -1);
} }

View File

@@ -8,21 +8,13 @@
package org.dspace.scripts; package org.dspace.scripts;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.impl.MockDSpaceRunnableScript; import org.dspace.scripts.impl.MockDSpaceRunnableScript;
import org.springframework.beans.factory.annotation.Autowired;
public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableScript> extends ScriptConfiguration<T> { public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableScript> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;
private Class<T> dspaceRunnableClass; private Class<T> dspaceRunnableClass;
@Override @Override
@@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableS
this.dspaceRunnableClass = dspaceRunnableClass; this.dspaceRunnableClass = dspaceRunnableClass;
} }
@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}
@Override @Override
public Options getOptions() { public Options getOptions() {
if (options == null) { if (options == null) {

View File

@@ -118,7 +118,8 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
} }
/** /**
* Constructs a solr search URL. * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2.
* https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2
* *
* @param query the search terms * @param query the search terms
* @param manifestId the id of the manifest in which to search * @param manifestId the id of the manifest in which to search
@@ -132,8 +133,9 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
solrQuery.set("hl.ocr.fl", "ocr_text"); solrQuery.set("hl.ocr.fl", "ocr_text");
solrQuery.set("hl.ocr.contextBlock", "line"); solrQuery.set("hl.ocr.contextBlock", "line");
solrQuery.set("hl.ocr.contextSize", "2"); solrQuery.set("hl.ocr.contextSize", "2");
solrQuery.set("hl.snippets", "10"); solrQuery.set("hl.snippets", "8192");
solrQuery.set("hl.ocr.trackPages", "off"); solrQuery.set("hl.ocr.maxPassages", "8192");
solrQuery.set("hl.ocr.trackPages", "on");
solrQuery.set("hl.ocr.limitBlock","page"); solrQuery.set("hl.ocr.limitBlock","page");
solrQuery.set("hl.ocr.absoluteHighlights", "true"); solrQuery.set("hl.ocr.absoluteHighlights", "true");

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xoai.app.plugins;
import java.sql.SQLException;
import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin;
import org.dspace.xoai.util.ItemUtils;
/**
* AccessStatusElementItemCompilePlugin aims to add structured information about the
* Access Status of the item (if any).
* The xoai document will be enriched with a structure like the following:
* <pre>
* {@code
* <element name="others">
* <element name="access-status">
* <field name="value">open.access</field>
* </element>
* </element>
* }
* </pre>
* Returned values are based on:
* @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper
*/
public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin {
@Override
public Metadata additionalMetadata(Context context, Metadata metadata, Item item) {
AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService();
try {
String accessStatusType = accessStatusService.getAccessStatus(context, item);
Element accessStatus = ItemUtils.create("access-status");
accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType));
List<Element> elements = metadata.getElement();
Element others = ItemUtils.getElement(elements, "others");
if (others == null) {
others = ItemUtils.create("others");
// a newly created element must be attached to the document, or the enrichment is lost
elements.add(others);
}
others.getElement().add(accessStatus);
} catch (SQLException e) {
e.printStackTrace();
}
return metadata;
}
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.repository.BitstreamRestRepository;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* REST controller for handling bulk updates to Bitstream resources.
* <p>
* This controller is responsible for handling requests to the bitstream category, which allows for updating
* multiple bitstream resources in a single operation.
* </p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
@RestController
@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME)
public class BitstreamCategoryRestController {
@Autowired
BitstreamRestRepository bitstreamRestRepository;
/**
* Handles PATCH requests to the bitstream category for bulk updates of bitstream resources.
*
* @param request the HTTP request object.
* @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied.
* @return a ResponseEntity representing the HTTP response to be sent back to the client; in this case, an
* HTTP 204 No Content response, since currently only a delete operation is supported.
* @throws SQLException if an error occurs while accessing the database.
* @throws AuthorizeException if the user is not authorized to perform the requested operation.
*/
@RequestMapping(method = RequestMethod.PATCH)
public ResponseEntity<RepresentationModel<?>> patch(HttpServletRequest request,
@RequestBody(required = true) JsonNode jsonNode)
throws SQLException, AuthorizeException {
Context context = obtainContext(request);
bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode);
return ResponseEntity.noContent().build();
}
}
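A minimal usage sketch for this endpoint, written in the style of a DSpace controller integration test; getClient, getAuthToken, patch and status are assumed to be the usual AbstractControllerIntegrationTest/MockMvc helpers, MediaType is Spring's, and the remove path follows the bulk-delete operation this controller currently supports (values illustrative):
String token = getAuthToken(admin.getEmail(), password);
// one JSON Patch "remove" operation per bitstream to delete
String patchBody = "[{\"op\": \"remove\", \"path\": \"/bitstreams/" + bitstream.getID() + "\"}]";
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isNoContent());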

View File

@@ -176,7 +176,7 @@ public class OpenSearchController {
if (dsoObject != null) { if (dsoObject != null) {
container = scopeResolver.resolveScope(context, dsoObject); container = scopeResolver.resolveScope(context, dsoObject);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso("site", container); .getDiscoveryConfiguration(context, container);
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray( .toArray(

View File

@@ -12,18 +12,23 @@ import java.util.List;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.ConverterService; import org.dspace.app.rest.converter.ConverterService;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ProcessRest;
import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.ScriptRest;
import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.model.hateoas.ProcessResource;
import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.repository.ScriptRestRepository;
import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.service.ScriptService;
import org.dspace.services.RequestService; import org.dspace.services.RequestService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ControllerUtils; import org.springframework.data.rest.webmvc.ControllerUtils;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.hateoas.RepresentationModel; import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PathVariable;
@@ -48,6 +53,9 @@ public class ScriptProcessesController {
@Autowired @Autowired
private ScriptRestRepository scriptRestRepository; private ScriptRestRepository scriptRestRepository;
@Autowired
private ScriptService scriptService;
@Autowired @Autowired
private RequestService requestService; private RequestService requestService;
@@ -59,8 +67,8 @@ public class ScriptProcessesController {
* @return The ProcessResource object for the created process * @return The ProcessResource object for the created process
* @throws Exception If something goes wrong * @throws Exception If something goes wrong
*/ */
@RequestMapping(method = RequestMethod.POST) @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasAuthority('AUTHENTICATED')")
public ResponseEntity<RepresentationModel<?>> startProcess( public ResponseEntity<RepresentationModel<?>> startProcess(
@PathVariable(name = "name") String scriptName, @PathVariable(name = "name") String scriptName,
@RequestParam(name = "file", required = false) List<MultipartFile> files) @RequestParam(name = "file", required = false) List<MultipartFile> files)
@@ -75,4 +83,21 @@ public class ScriptProcessesController {
return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource);
} }
@RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE)
@PreAuthorize("hasAuthority('AUTHENTICATED')")
public ResponseEntity<RepresentationModel<?>> startProcessInvalidMimeType(
@PathVariable(name = "name") String scriptName)
throws Exception {
if (log.isTraceEnabled()) {
log.trace("Starting Process for Script with name: " + scriptName);
}
Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest());
ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName);
if (scriptToExecute == null) {
throw new ResourceNotFoundException("The script with name: " + scriptName + " wasn't found");
}
throw new DSpaceBadRequestException("Invalid mimetype");
}
} }
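With the two consumes clauses, Spring routes by content type: multipart requests reach startProcess, while anything else falls through to startProcessInvalidMimeType, which still distinguishes an unknown script (404) from a bad content type (400). A sketch of both outcomes, assuming the controller is mapped under /api/system/scripts/{name}/processes and the usual integration-test helpers (values illustrative):
String token = getAuthToken(eperson.getEmail(), password);
// multipart request against a known script: accepted and queued as a new process
getClient(token).perform(multipart("/api/system/scripts/mock-script/processes"))
.andExpect(status().isAccepted());
// non-multipart request against the same script: rejected with 400 Bad Request
getClient(token).perform(post("/api/system/scripts/mock-script/processes"))
.andExpect(status().isBadRequest());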

View File

@@ -7,12 +7,17 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.projection.Projection;
import org.dspace.browse.BrowseIndex; import org.dspace.browse.BrowseIndex;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.sort.SortException; import org.dspace.sort.SortException;
import org.dspace.sort.SortOption; import org.dspace.sort.SortOption;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -30,18 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { public BrowseIndexRest convert(BrowseIndex obj, Projection projection) {
BrowseIndexRest bir = new BrowseIndexRest(); BrowseIndexRest bir = new BrowseIndexRest();
bir.setProjection(projection); bir.setProjection(projection);
bir.setId(obj.getName());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setMetadataBrowse(obj.isMetadataIndex());
List<String> metadataList = new ArrayList<String>(); List<String> metadataList = new ArrayList<String>();
if (obj.isMetadataIndex()) { String id = obj.getName();
if (obj instanceof DSpaceControlledVocabularyIndex) {
DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj;
metadataList = new ArrayList<>(vocObj.getMetadataFields());
id = vocObj.getVocabulary().getPluginInstanceName();
bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName());
bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName());
bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL);
} else if (obj.isMetadataIndex()) {
for (String s : obj.getMetadata().split(",")) { for (String s : obj.getMetadata().split(",")) {
metadataList.add(s.trim()); metadataList.add(s.trim());
} }
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_VALUE_LIST);
} else { } else {
metadataList.add(obj.getSortOption().getMetadata()); metadataList.add(obj.getSortOption().getMetadata());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_FLAT);
} }
bir.setId(id);
bir.setMetadataList(metadataList); bir.setMetadataList(metadataList);
List<BrowseIndexRest.SortOption> sortOptionsList = new ArrayList<BrowseIndexRest.SortOption>(); List<BrowseIndexRest.SortOption> sortOptionsList = new ArrayList<BrowseIndexRest.SortOption>();
@@ -52,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
} catch (SortException e) { } catch (SortException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
bir.setSortOptions(sortOptionsList); if (!bir.getBrowseType().equals(BROWSE_TYPE_HIERARCHICAL)) {
bir.setSortOptions(sortOptionsList);
}
return bir; return bir;
} }

View File

@@ -80,6 +80,15 @@ public class DiscoverConfigurationConverter
sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name()); sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name());
searchConfigurationRest.addSortOption(sortOption); searchConfigurationRest.addSortOption(sortOption);
} }
DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField();
if (defaultSortField != null) {
SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption();
sortOption.setName(defaultSortField.getMetadataField());
sortOption.setActualName(defaultSortField.getType());
sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name());
searchConfigurationRest.setDefaultSortOption(sortOption);
}
} }
} }
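On the wire this adds a sibling of sortOptions to the search configuration response; an illustrative fragment, with field names taken from the SortOption setters above and hypothetical values:
"sortOptions": [ { "name": "dc.date.accessioned", "actualName": "date", "sortOrder": "DESC" } ],
"defaultSortOption": { "name": "dc.date.accessioned", "actualName": "date", "sortOrder": "DESC" }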

View File

@@ -174,6 +174,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
GroupNameNotProvidedException.class, GroupNameNotProvidedException.class,
GroupHasPendingWorkflowTasksException.class, GroupHasPendingWorkflowTasksException.class,
PasswordNotValidException.class, PasswordNotValidException.class,
RESTBitstreamNotFoundException.class
}) })
protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
TranslatableException ex) throws IOException { TranslatableException ex) throws IOException {

View File

@@ -0,0 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.exception;
import java.text.MessageFormat;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
/**
* <p>Extend {@link UnprocessableEntityException} to provide a specific error message
* in the REST response. The error message is added to the response in
* {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException},
* hence it should not contain sensitive or security-compromising info.</p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException {
public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message";
// an instance field avoids the race a mutable static uuid would create under concurrent requests
private final String uuid;
/**
* @param formatStr string with placeholders, ideally obtained using {@link I18nUtil}
* @param uuid the id of the bitstream that could not be found
* @return message with bitstream id substituted
*/
private static String formatMessage(String formatStr, String uuid) {
MessageFormat fmt = new MessageFormat(formatStr);
return fmt.format(new String[]{uuid});
}
public RESTBitstreamNotFoundException(String uuid) {
super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY), uuid));
this.uuid = uuid;
}
public String getMessageKey() {
return MESSAGE_KEY;
}
public String getLocalizedMessage(Context context) {
return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context), uuid);
}
}
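A sketch of the intended call site, e.g. while resolving the bitstream uuids named in a bulk patch; bitstreamService.find is the standard service lookup, and the variable names are illustrative:
Bitstream bitstream = bitstreamService.find(context, UUID.fromString(uuidString));
if (bitstream == null) {
// surfaces to the client as HTTP 422 with the localized MESSAGE_KEY text
throw new RESTBitstreamNotFoundException(uuidString);
}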

View File

@@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory<BrowseEntryResourc
UriComponentsBuilder baseLink = uriBuilder( UriComponentsBuilder baseLink = uriBuilder(
getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(), getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(),
English.plural(bix.getType()), bix.getId(), English.plural(bix.getType()), bix.getId(),
BrowseIndexRest.ITEMS, null, null)); BrowseIndexRest.LINK_ITEMS, null, null));
addFilterParams(baseLink, data); addFilterParams(baseLink, data);
list.add(buildLink(BrowseIndexRest.ITEMS, list.add(buildLink(BrowseIndexRest.LINK_ITEMS,
baseLink.build().encode().toUriString())); baseLink.build().encode().toUriString()));
} }
} }

View File

@@ -10,6 +10,7 @@ package org.dspace.app.rest.model;
import java.util.List; import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.RestResourceController;
@@ -20,11 +21,11 @@ import org.dspace.app.rest.RestResourceController;
*/ */
@LinksRest(links = { @LinksRest(links = {
@LinkRest( @LinkRest(
name = BrowseIndexRest.ITEMS, name = BrowseIndexRest.LINK_ITEMS,
method = "listBrowseItems" method = "listBrowseItems"
), ),
@LinkRest( @LinkRest(
name = BrowseIndexRest.ENTRIES, name = BrowseIndexRest.LINK_ENTRIES,
method = "listBrowseEntries" method = "listBrowseEntries"
) )
}) })
@@ -35,20 +36,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
public static final String CATEGORY = RestAddressableModel.DISCOVER; public static final String CATEGORY = RestAddressableModel.DISCOVER;
public static final String ITEMS = "items"; public static final String LINK_ITEMS = "items";
public static final String ENTRIES = "entries"; public static final String LINK_ENTRIES = "entries";
public static final String LINK_VOCABULARY = "vocabulary";
boolean metadataBrowse; // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types,
// etc.; the second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_VALUE_LIST = "valueList";
// if the browse index has one level: the full list of items
public static final String BROWSE_TYPE_FLAT = "flatBrowse";
// if the browse index should display the vocabulary tree. The 1st level shows the tree.
// The second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse";
// Shared fields
String browseType;
@JsonProperty(value = "metadata") @JsonProperty(value = "metadata")
List<String> metadataList; List<String> metadataList;
// Single browse index fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String dataType; String dataType;
@JsonInclude(JsonInclude.Include.NON_NULL)
List<SortOption> sortOptions; List<SortOption> sortOptions;
@JsonInclude(JsonInclude.Include.NON_NULL)
String order; String order;
// Hierarchical browse fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String facetType;
@JsonInclude(JsonInclude.Include.NON_NULL)
String vocabulary;
@JsonIgnore @JsonIgnore
@Override @Override
public String getCategory() { public String getCategory() {
@@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
return NAME; return NAME;
} }
public boolean isMetadataBrowse() {
return metadataBrowse;
}
public void setMetadataBrowse(boolean metadataBrowse) {
this.metadataBrowse = metadataBrowse;
}
public List<String> getMetadataList() { public List<String> getMetadataList() {
return metadataList; return metadataList;
} }
@@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
this.sortOptions = sortOptions; this.sortOptions = sortOptions;
} }
/**
* - valueList => the browse index has two levels: the 1st level shows the list of entries like author names,
* subjects, types, etc.; the 2nd level is the actual list of items linked to a specific entry
* - flatBrowse => the browse index has one level: the full list of items
* - hierarchicalBrowse => the browse index displays the vocabulary tree: the 1st level shows the tree,
* the 2nd level is the actual list of items linked to a specific entry
*/
public void setBrowseType(String browseType) {
this.browseType = browseType;
}
public String getBrowseType() {
return browseType;
}
public void setFacetType(String facetType) {
this.facetType = facetType;
}
public String getFacetType() {
return facetType;
}
public void setVocabulary(String vocabulary) {
this.vocabulary = vocabulary;
}
public String getVocabulary() {
return vocabulary;
}
@Override @Override
public Class getController() { public Class getController() {
return RestResourceController.class; return RestResourceController.class;
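For a hierarchical index the serialized model therefore carries the vocabulary fields, while the @JsonInclude(NON_NULL) annotations suppress the flat-browse ones; an illustrative fragment for a controlled vocabulary such as DSpace's sample srsc (hypothetical values):
{
"id": "srsc",
"browseType": "hierarchicalBrowse",
"facetType": "subject",
"vocabulary": "srsc",
"metadata": [ "dc.subject" ]
}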

View File

@@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
private List<Filter> filters = new LinkedList<>(); private List<Filter> filters = new LinkedList<>();
private List<SortOption> sortOptions = new LinkedList<>(); private List<SortOption> sortOptions = new LinkedList<>();
private SortOption defaultSortOption;
public String getCategory() { public String getCategory() {
return CATEGORY; return CATEGORY;
} }
@@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
return sortOptions; return sortOptions;
} }
public SortOption getDefaultSortOption() {
return defaultSortOption;
}
public void setDefaultSortOption(SortOption defaultSortOption) {
this.defaultSortOption = defaultSortOption;
}
@Override @Override
public boolean equals(Object object) { public boolean equals(Object object) {
return (object instanceof SearchConfigurationRest && return (object instanceof SearchConfigurationRest &&

View File

@@ -7,9 +7,20 @@
*/ */
package org.dspace.app.rest.model.hateoas; package org.dspace.app.rest.model.hateoas;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import org.atteo.evo.inflector.English;
import org.dspace.app.rest.RestResourceController;
import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.model.VocabularyRest;
import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource;
import org.dspace.app.rest.utils.Utils; import org.dspace.app.rest.utils.Utils;
import org.dspace.content.authority.ChoiceAuthority;
import org.dspace.content.authority.factory.ContentAuthorityServiceFactory;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.springframework.hateoas.Link;
import org.springframework.web.util.UriComponentsBuilder;
/** /**
* Browse Index Rest HAL Resource. The HAL Resource wraps the REST Resource * Browse Index Rest HAL Resource. The HAL Resource wraps the REST Resource
@@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils;
*/ */
@RelNameDSpaceResource(BrowseIndexRest.NAME) @RelNameDSpaceResource(BrowseIndexRest.NAME)
public class BrowseIndexResource extends DSpaceResource<BrowseIndexRest> { public class BrowseIndexResource extends DSpaceResource<BrowseIndexRest> {
public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { public BrowseIndexResource(BrowseIndexRest bix, Utils utils) {
super(bix, utils); super(bix, utils);
// TODO: the following code will force the embedding of items and // TODO: the following code will force the embedding of items and
// entries in the browseIndex we need to find a way to populate the rels // entries in the browseIndex we need to find a way to populate the rels
// array from the request/projection right now it is always null // array from the request/projection right now it is always null
// super(bix, utils, "items", "entries"); // super(bix, utils, "items", "entries");
if (bix.isMetadataBrowse()) { if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES));
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) {
ChoiceAuthorityService choiceAuthorityService =
ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService();
ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary());
UriComponentsBuilder baseLink = linkTo(
methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null,
null, VocabularyRest.CATEGORY,
English.plural(VocabularyRest.NAME), source.getPluginInstanceName(),
"", null, null)).toUriComponentsBuilder();
add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY));
} }
add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS));
} }
} }

View File

@@ -15,9 +15,12 @@ import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter; import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.JsonPatchConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
@@ -38,6 +41,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
@@ -72,6 +76,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
@Autowired @Autowired
private HandleService handleService; private HandleService handleService;
@Autowired
ConfigurationService configurationService;
@Autowired @Autowired
public BitstreamRestRepository(BitstreamService dsoService) { public BitstreamRestRepository(BitstreamService dsoService) {
super(dsoService); super(dsoService);
@@ -248,4 +255,25 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
return converter.toRest(targetBundle, utils.obtainProjection()); return converter.toRest(targetBundle, utils.obtainProjection());
} }
/**
* Method that will transform the provided PATCH json body into a list of operations.
* The operations will be handled by a supporting class resolved by the
* {@link org.dspace.app.rest.repository.patch.ResourcePatch#patch} method.
*
* @param context The context
* @param jsonNode the json body provided from the request body
* @throws SQLException if a database error occurs while committing the changes
*/
public void patchBitstreamsInBulk(Context context, JsonNode jsonNode) throws SQLException {
int operationsLimit = configurationService.getIntProperty("rest.patch.operations.limit", 1000);
ObjectMapper mapper = new ObjectMapper();
JsonPatchConverter patchConverter = new JsonPatchConverter(mapper);
Patch patch = patchConverter.convert(jsonNode);
if (patch.getOperations().size() > operationsLimit) {
throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " +
operationsLimit);
}
resourcePatch.patch(context, null, patch.getOperations());
context.commit();
}
} }

View File

@@ -40,7 +40,7 @@ import org.springframework.stereotype.Component;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ENTRIES)
public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository { implements LinkRestRepository {
@@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
@Override @Override
public boolean isEmbeddableRelation(Object data, String name) { public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data; BrowseIndexRest bir = (BrowseIndexRest) data;
if (bir.isMetadataBrowse() && "entries".equals(name)) { if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) &&
name.equals(BrowseIndexRest.LINK_ENTRIES)) {
return true; return true;
} }
return false; return false;

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest.repository; package org.dspace.app.rest.repository;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@@ -17,7 +18,10 @@ import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex; import org.dspace.browse.BrowseIndex;
import org.dspace.browse.CrossLinks; import org.dspace.browse.CrossLinks;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
@@ -31,26 +35,48 @@ import org.springframework.stereotype.Component;
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME)
public class BrowseIndexRestRepository extends DSpaceRestRepository<BrowseIndexRest, String> { public class BrowseIndexRestRepository extends DSpaceRestRepository<BrowseIndexRest, String> {
@Autowired
private ChoiceAuthorityService choiceAuthorityService;
@Override @Override
@PreAuthorize("permitAll()") @PreAuthorize("permitAll()")
public BrowseIndexRest findOne(Context context, String name) { public BrowseIndexRest findOne(Context context, String name) {
BrowseIndexRest bi = null; BrowseIndexRest bi = createFromMatchingBrowseIndex(name);
if (bi == null) {
bi = createFromMatchingVocabulary(name);
}
return bi;
}
private BrowseIndexRest createFromMatchingVocabulary(String name) {
DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name);
if (vocabularyIndex != null) {
return converter.toRest(vocabularyIndex, utils.obtainProjection());
}
return null;
}
private BrowseIndexRest createFromMatchingBrowseIndex(String name) {
BrowseIndex bix; BrowseIndex bix;
try { try {
bix = BrowseIndex.getBrowseIndex(name); bix = BrowseIndex.getBrowseIndex(name);
} catch (BrowseException e) { } catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
if (bix != null) { if (bix != null) {
bi = converter.toRest(bix, utils.obtainProjection()); return converter.toRest(bix, utils.obtainProjection());
} }
return bi; return null;
} }
@Override @Override
public Page<BrowseIndexRest> findAll(Context context, Pageable pageable) { public Page<BrowseIndexRest> findAll(Context context, Pageable pageable) {
try { try {
List<BrowseIndex> indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); List<BrowseIndex> indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices()));
choiceAuthorityService.getChoiceAuthoritiesNames()
.stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null)
.forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name)));
return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection());
} catch (BrowseException e) { } catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);

View File

@@ -42,7 +42,7 @@ import org.springframework.stereotype.Component;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS)
public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository { implements LinkRestRepository {
@@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
@Override @Override
public boolean isEmbeddableRelation(Object data, String name) { public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data; BrowseIndexRest bir = (BrowseIndexRest) data;
if (!bir.isMetadataBrowse() && "items".equals(name)) { if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) &&
name.equals(BrowseIndexRest.LINK_ITEMS)) {
return true; return true;
} }
return false; return false;

View File

@@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
} }
@@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Context context = obtainContext(); Context context = obtainContext();
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null; DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null; DiscoverQuery discoverQuery = null;
@@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration);
} }
@@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName); query, searchFilters, dsoTypes, page, facetName);
@@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Pageable page = PageRequest.of(1, 1); Pageable page = PageRequest.of(1, 1);
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null; DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null; DiscoverQuery discoverQuery = null;

View File

@@ -14,6 +14,7 @@ import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
@@ -45,10 +46,10 @@ import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
* This is the repository responsible to manage MetadataField Rest object * This is the repository responsible to manage MetadataField Rest object
* *
@@ -135,13 +136,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
@Parameter(value = "exactName", required = false) String exactName, @Parameter(value = "exactName", required = false) String exactName,
Pageable pageable) throws SQLException { Pageable pageable) throws SQLException {
Context context = obtainContext(); Context context = obtainContext();
long totalElements = 0;
List<MetadataField> matchingMetadataFields = new ArrayList<>(); List<MetadataField> matchingMetadataFields = new ArrayList<>();
if (StringUtils.isBlank(exactName)) { if (StringUtils.isBlank(exactName)) {
// Find matches in Solr Search core // Find matches in Solr Search core
DiscoverQuery discoverQuery = DiscoverQuery discoverQuery =
this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query); this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable);
try { try {
DiscoverResult searchResult = searchService.search(context, null, discoverQuery); DiscoverResult searchResult = searchService.search(context, null, discoverQuery);
for (IndexableObject object : searchResult.getIndexableObjects()) { for (IndexableObject object : searchResult.getIndexableObjects()) {
@@ -149,6 +151,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject()); matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject());
} }
} }
totalElements = searchResult.getTotalSearchResults();
} catch (SearchServiceException e) { } catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e); log.error("Error while searching with Discovery", e);
throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage()); throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage());
@@ -163,10 +166,11 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.'); MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.');
if (exactMatchingMdField != null) { if (exactMatchingMdField != null) {
matchingMetadataFields.add(exactMatchingMdField); matchingMetadataFields.add(exactMatchingMdField);
totalElements = 1;
} }
} }
return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection()); return converter.toRestPage(matchingMetadataFields, pageable, totalElements, utils.obtainProjection());
} }
/** /**
@@ -182,7 +186,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
* @throws SQLException If DB error * @throws SQLException If DB error
*/ */
private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName, private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName,
String qualifierName, String query) throws SQLException { String qualifierName, String query, Pageable pageable) throws SQLException {
List<String> filterQueries = new ArrayList<>(); List<String> filterQueries = new ArrayList<>();
if (StringUtils.isNotBlank(query)) { if (StringUtils.isNotBlank(query)) {
if (query.split("\\.").length > 3) { if (query.split("\\.").length > 3) {
@@ -210,6 +214,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
DiscoverQuery discoverQuery = new DiscoverQuery(); DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()])); discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()]));
Iterator<Sort.Order> orderIterator = pageable.getSort().iterator();
if (orderIterator.hasNext()) {
Sort.Order order = orderIterator.next();
discoverQuery.setSortField(order.getProperty() + "_sort",
order.getDirection() == Sort.Direction.ASC ? DiscoverQuery.SORT_ORDER.asc :
DiscoverQuery.SORT_ORDER.desc);
}
discoverQuery.setStart(Math.toIntExact(pageable.getOffset()));
discoverQuery.setMaxResults(pageable.getPageSize());
return discoverQuery; return discoverQuery;
} }
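Spelled out on a concrete request, the new pageable handling maps REST paging onto the Solr query as follows (hypothetical values; PageRequest only illustrates what the repository receives):
// e.g. ?page=2&size=50&sort=fieldName,desc
Pageable pageable = PageRequest.of(2, 50, Sort.by(Sort.Direction.DESC, "fieldName"));
// the query then sorts on "fieldName_sort" descending,
// starts at offset 2 * 50 = 100 and returns at most 50 results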
@@ -247,10 +260,18 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
if (isBlank(metadataFieldRest.getElement())) { if (isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
} else if (!metadataFieldRest.getElement().matches("^[^. ,]{1,64}$")) {
throw new UnprocessableEntityException(
"metadata element (in request body) cannot contain dots, commas or spaces and should be smaller than" +
" 64 characters");
} }
if (isBlank(metadataFieldRest.getQualifier())) { if (isBlank(metadataFieldRest.getQualifier())) {
metadataFieldRest.setQualifier(null); metadataFieldRest.setQualifier(null);
} else if (!metadataFieldRest.getQualifier().matches("^[^. ,]{1,64}$")) {
throw new UnprocessableEntityException(
"metadata qualifier (in request body) cannot contain dots, commas or spaces and should be smaller" +
" than 64 characters");
} }
// create // create
@@ -300,24 +321,26 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
try { try {
metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class); metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e); throw new DSpaceBadRequestException("Cannot parse JSON in request body", e);
} }
if (metadataFieldRest == null || isBlank(metadataFieldRest.getElement())) { MetadataField metadataField = metadataFieldService.find(context, id);
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank"); if (metadataField == null) {
throw new UnprocessableEntityException("metadata field with id: " + id + " not found");
}
if (!Objects.equals(metadataFieldRest.getElement(), metadataField.getElement())) {
throw new UnprocessableEntityException("Metadata element cannot be updated.");
}
if (!Objects.equals(metadataFieldRest.getQualifier(), metadataField.getQualifier())) {
throw new UnprocessableEntityException("Metadata qualifier cannot be updated.");
} }
if (!Objects.equals(id, metadataFieldRest.getId())) { if (!Objects.equals(id, metadataFieldRest.getId())) {
throw new UnprocessableEntityException("ID in request body doesn't match path ID"); throw new UnprocessableEntityException("ID in request body doesn't match path ID");
} }
MetadataField metadataField = metadataFieldService.find(context, id);
if (metadataField == null) {
throw new ResourceNotFoundException("metadata field with id: " + id + " not found");
}
metadataField.setElement(metadataFieldRest.getElement());
metadataField.setQualifier(metadataFieldRest.getQualifier());
metadataField.setScopeNote(metadataFieldRest.getScopeNote()); metadataField.setScopeNote(metadataFieldRest.getScopeNote());
try { try {

View File

@@ -93,6 +93,10 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
// validate fields // validate fields
if (isBlank(metadataSchemaRest.getPrefix())) { if (isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank"); throw new UnprocessableEntityException("metadata schema name cannot be blank");
} else if (!metadataSchemaRest.getPrefix().matches("^[^. ,]{1,32}$")) {
throw new UnprocessableEntityException(
"metadata schema namespace cannot contain dots, commas or spaces and should be smaller than" +
" 32 characters");
} }
if (isBlank(metadataSchemaRest.getNamespace())) { if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
@@ -142,11 +146,16 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
try { try {
metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class); metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e); throw new DSpaceBadRequestException("Cannot parse JSON in request body", e);
} }
if (metadataSchemaRest == null || isBlank(metadataSchemaRest.getPrefix())) { MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
throw new UnprocessableEntityException("metadata schema name cannot be blank"); if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
if (!Objects.equals(metadataSchemaRest.getPrefix(), metadataSchema.getName())) {
throw new UnprocessableEntityException("Metadata schema name cannot be updated.");
} }
if (isBlank(metadataSchemaRest.getNamespace())) { if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank"); throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
@@ -156,12 +165,6 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
throw new UnprocessableEntityException("ID in request doesn't match path ID"); throw new UnprocessableEntityException("ID in request doesn't match path ID");
} }
MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
metadataSchema.setName(metadataSchemaRest.getPrefix());
metadataSchema.setNamespace(metadataSchemaRest.getNamespace()); metadataSchema.setNamespace(metadataSchemaRest.getNamespace());
try { try {

View File

@@ -47,7 +47,7 @@ public class ProcessFileTypesLinkRepository extends AbstractDSpaceRestRepository
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong * @throws AuthorizeException If something goes wrong
*/ */
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request, public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request,
Integer processId, Integer processId,
@Nullable Pageable optionalPageable, @Nullable Pageable optionalPageable,

View File

@@ -47,7 +47,7 @@ public class ProcessFilesLinkRepository extends AbstractDSpaceRestRepository imp
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong * @throws AuthorizeException If something goes wrong
*/ */
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
public Page<BitstreamRest> getFilesFromProcess(@Nullable HttpServletRequest request, public Page<BitstreamRest> getFilesFromProcess(@Nullable HttpServletRequest request,
Integer processId, Integer processId,
@Nullable Pageable optionalPageable, @Nullable Pageable optionalPageable,

View File

@@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
* @throws AuthorizeException If something goes wrong * @throws AuthorizeException If something goes wrong
*/ */
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
public BitstreamRest getOutputFromProcess(@Nullable HttpServletRequest request, public BitstreamRest getOutputFromProcess(@Nullable HttpServletRequest request,
Integer processId, Integer processId,
@Nullable Pageable optionalPageable, @Nullable Pageable optionalPageable,

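The three link repositories above replace the blanket hasAuthority('ADMIN') check with hasPermission(#processId, 'PROCESS', 'READ'), which Spring Security resolves through a PermissionEvaluator. A sketch of the general shape such an evaluator can take; the class name, the injected ownership check, and the ADMIN authority comparison are assumptions for illustration, not the evaluator this change actually registers:

import java.io.Serializable;
import java.util.function.IntPredicate;
import org.springframework.security.access.PermissionEvaluator;
import org.springframework.security.core.Authentication;

/** Hypothetical evaluator behind hasPermission(#processId, 'PROCESS', 'READ'). */
public class ProcessReadPermissionEvaluator implements PermissionEvaluator {

    private final IntPredicate ownedByCurrentUser; // injected check: did the caller start this process?

    public ProcessReadPermissionEvaluator(IntPredicate ownedByCurrentUser) {
        this.ownedByCurrentUser = ownedByCurrentUser;
    }

    @Override
    public boolean hasPermission(Authentication authentication, Object targetDomainObject, Object permission) {
        // The SpEL expression above always passes an id, so the object-based variant is unused.
        return false;
    }

    @Override
    public boolean hasPermission(Authentication authentication, Serializable targetId,
                                 String targetType, Object permission) {
        if (!"PROCESS".equals(targetType) || !"READ".equals(String.valueOf(permission))) {
            return false;
        }
        boolean isAdmin = authentication.getAuthorities().stream()
                .anyMatch(a -> "ADMIN".equals(a.getAuthority()));
        // Administrators keep full access; everyone else may only read processes they started.
        return isAdmin || ownedByCurrentUser.test((Integer) targetId);
    }
}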

@@ -94,6 +94,22 @@ public class ProcessRestRepository extends DSpaceRestRepository<ProcessRest, Int
} }
} }
@SearchRestMethod(name = "own")
@PreAuthorize("hasAuthority('AUTHENTICATED')")
public Page<ProcessRest> findByCurrentUser(Pageable pageable) {
try {
Context context = obtainContext();
long total = processService.countByUser(context, context.getCurrentUser());
List<Process> processes = processService.findByUser(context, context.getCurrentUser(),
pageable.getPageSize(),
Math.toIntExact(pageable.getOffset()));
return converter.toRestPage(processes, pageable, total, utils.obtainProjection());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
}
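The new search method is published under the name "own" and only requires authentication. Assuming process resources keep their standard mount point under /api/system/processes, a caller lists their own processes as in this sketch (MockMvc style borrowed from the integration tests; the path and assertions are assumptions, not code from this change):

String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/system/processes/search/own")
        .param("page", "0")
        .param("size", "5"))
        .andExpect(status().isOk())
        // Only processes started by the calling EPerson should come back.
        .andExpect(jsonPath("$.page.size", is(5)));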
/** /**
* Calls on the getBitstreams method to retrieve all the Bitstreams of this process * Calls on the getBitstreams method to retrieve all the Bitstreams of this process
* @param processId The processId of the Process to retrieve the Bitstreams for * @param processId The processId of the Process to retrieve the Bitstreams for


@@ -37,6 +37,7 @@ import org.dspace.scripts.service.ScriptService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -56,29 +57,24 @@ public class ScriptRestRepository extends DSpaceRestRepository<ScriptRest, Strin
@Autowired @Autowired
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
// TODO: findOne() currently requires site ADMIN permissions as all scripts are admin-only at this time.
// If scripts ever need to be accessible to Comm/Coll Admins, we would likely need to create a new GrantedAuthority
// for Comm/Coll Admins in EPersonRestAuthenticationProvider to use on this endpoint
@Override @Override
@PreAuthorize("hasAuthority('ADMIN')") // authorization is verified inside the method
@PreAuthorize("hasAuthority('AUTHENTICATED')")
public ScriptRest findOne(Context context, String name) { public ScriptRest findOne(Context context, String name) {
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(name); ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(name);
if (scriptConfiguration != null) { if (scriptConfiguration != null) {
if (scriptConfiguration.isAllowedToExecute(context)) { if (scriptConfiguration.isAllowedToExecute(context, null)) {
return converter.toRest(scriptConfiguration, utils.obtainProjection()); return converter.toRest(scriptConfiguration, utils.obtainProjection());
} else { } else {
throw new AccessDeniedException("The current user was not authorized to access this script"); throw new AccessDeniedException("The current user was not authorized to access this script");
} }
} }
throw new DSpaceBadRequestException("The script with name: " + name + " could not be found"); return null;
} }
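findOne() now calls the two-argument isAllowedToExecute(context, null): a null parameter list asks only whether the script should be visible, while execution (further down) passes the real parameters. A sketch of how a ScriptConfiguration subclass might implement the new signature, assuming DSpaceCommandLineParameter exposes getName(); the visibility rule and the "-f" restriction are chosen purely for illustration:

@Override
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
    if (commandLineParameters == null) {
        // Called from findOne()/findAll(): decide only whether the script is visible at all.
        return context.getCurrentUser() != null;
    }
    // Called before execution: the rule may depend on the actual parameters.
    return commandLineParameters.stream()
            .noneMatch(parameter -> "-f".equals(parameter.getName())); // hypothetical restricted flag
}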
// TODO: findAll() currently requires site ADMIN permissions as all scripts are admin-only at this time.
// If scripts ever need to be accessible to Comm/Coll Admins, we would likely need to create a new GrantedAuthority
// for Comm/Coll Admins in EPersonRestAuthenticationProvider to use on this endpoint
@Override @Override
@PreAuthorize("hasAuthority('ADMIN')") // authorization check is performed inside the script service
@PreAuthorize("hasAuthority('AUTHENTICATED')")
public Page<ScriptRest> findAll(Context context, Pageable pageable) { public Page<ScriptRest> findAll(Context context, Pageable pageable) {
List<ScriptConfiguration> scriptConfigurations = List<ScriptConfiguration> scriptConfigurations =
scriptService.getScriptConfigurations(context); scriptService.getScriptConfigurations(context);
@@ -104,11 +100,17 @@ public class ScriptRestRepository extends DSpaceRestRepository<ScriptRest, Strin
List<DSpaceCommandLineParameter> dSpaceCommandLineParameters = List<DSpaceCommandLineParameter> dSpaceCommandLineParameters =
processPropertiesToDSpaceCommandLineParameters(properties); processPropertiesToDSpaceCommandLineParameters(properties);
ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName);
if (scriptToExecute == null) { if (scriptToExecute == null) {
throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found");
} }
if (!scriptToExecute.isAllowedToExecute(context)) { try {
throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) {
throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName
+ " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", "));
}
} catch (IllegalArgumentException e) {
throw new DSpaceBadRequestException("missed handle");
} }
RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler( RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler(
context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters, context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters,


@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository.patch.operation;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.rest.exception.RESTBitstreamNotFoundException;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.stereotype.Component;
/**
* A PATCH operation for removing bitstreams in bulk from the repository.
*
* Example: <code>
* curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json"
* -d '[
* {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"},
* {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"},
* {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"}
* ]'
* </code>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
@Component
public class BitstreamRemoveOperation extends PatchOperation<Bitstream> {
@Autowired
BitstreamService bitstreamService;
@Autowired
AuthorizeService authorizeService;
public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/";
@Override
public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException {
String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, "");
Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete));
if (bitstreamToDelete == null) {
throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete);
}
authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE);
try {
bitstreamService.delete(context, bitstreamToDelete);
} catch (AuthorizeException | IOException e) {
throw new RuntimeException(e.getMessage(), e);
}
return null;
}
@Override
public boolean supports(Object objectToMatch, Operation operation) {
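// Bulk removal is patched against the bitstream collection endpoint rather than a single
// resource, so there is no object to match (objectToMatch is null) and each operation's
// path carries the UUID of the bitstream to delete.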
return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) &&
operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE);
}
public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation)
throws SQLException {
try {
authorizeService.authorizeAction(context, bitstream, operation);
} catch (AuthorizeException e) {
throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e);
}
}
}


@@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
@@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
private Collection collection; private Collection collection;
private Path workDir; private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before @Before
@Override @Override
@@ -126,6 +128,10 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
checkMetadata(); checkMetadata();
checkMetadataWithAnotherSchema(); checkMetadataWithAnotherSchema();
checkBitstream(); checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
} }
@Test @Test


@@ -7,12 +7,16 @@
*/ */
package org.dspace.app.rest; package org.dspace.app.rest;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE;
import static org.dspace.core.Constants.WRITE; import static org.dspace.core.Constants.WRITE;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
@@ -21,9 +25,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.ws.rs.core.MediaType;
import org.apache.commons.codec.CharEncoding; import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
@@ -33,6 +39,7 @@ import org.dspace.app.rest.matcher.BundleMatcher;
import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.RemoveOperation;
import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.model.patch.ReplaceOperation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.app.rest.test.MetadataPatchSuite;
@@ -41,6 +48,7 @@ import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder; import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
@@ -52,15 +60,20 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.MvcResult;
public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest {
@@ -79,6 +92,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
@Autowired @Autowired
private ItemService itemService; private ItemService itemService;
@Autowired
CollectionService collectionService;
@Autowired
CommunityService communityService;
@Test @Test
public void findAllTest() throws Exception { public void findAllTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below //We turn off the authorization system in order to create the structure as defined below
@@ -2370,6 +2389,513 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
)); ));
} }
@Test
public void deleteBitstreamsInBulk() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
// Verify that only the three bitstreams were deleted and the fourth one still exists
Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3));
Assert.assertTrue(bitstreamExists(token, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_invalidUUID() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
// For the third bitstream, use an invalid UUID
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
UUID randomUUID = UUID.randomUUID();
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID);
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
MvcResult result = getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isUnprocessableEntity())
.andReturn();
// Verify our custom error message is returned when an invalid UUID is used
assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository",
result.getResponse().getErrorMessage());
// Verify that no bitstreams were deleted since the request was invalid
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
// But set the rest.patch.operations.limit property to 2, so that the request is invalid
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isBadRequest());
// Verify that no bitstreams were deleted since the request was invalid
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_Unauthorized() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
getClient().perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isUnauthorized());
}
@Test
public void deleteBitstreamsInBulk_Forbidden() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add all three bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
}
@Test
public void deleteBitstreamsInBulk_collectionAdmin() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 1")
.build();
Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 2")
.build();
EPerson col1Admin = EPersonBuilder.createEPerson(context)
.withEmail("col1admin@test.com")
.withPassword(password)
.build();
EPerson col2Admin = EPersonBuilder.createEPerson(context)
.withEmail("col2admin@test.com")
.withPassword(password)
.build();
Group col1_AdminGroup = collectionService.createAdministrators(context, col1);
Group col2_AdminGroup = collectionService.createAdministrators(context, col2);
groupService.addMember(context, col1_AdminGroup, col1Admin);
groupService.addMember(context, col2_AdminGroup, col2Admin);
Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(col1Admin.getEmail(), password);
// Should return forbidden since one of the bitstreams does not originate from collection 1
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
// Remove the bitstream that does not originate from the collection we are an administrator of; the request should now succeed
ops.remove(2);
patchBody = getPatchContent(ops);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
// Change the token to the admin of collection 2
token = getAuthToken(col2Admin.getEmail(), password);
// Add three out of four bitstreams to the list of bitstreams to be deleted
ops = new ArrayList<>();
removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp1);
removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp2);
removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID());
ops.add(removeOp3);
patchBody = getPatchContent(ops);
// Should return forbidden since one of the bitstreams does not originate from collection 2
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
// Remove the bitstream that does not originate from the collection we are an administrator of; the request should now succeed
ops.remove(0);
patchBody = getPatchContent(ops);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
}
@Test
public void deleteBitstreamsInBulk_communityAdmin() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 1")
.build();
Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 2")
.build();
EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context)
.withEmail("parentComAdmin@test.com")
.withPassword(password)
.build();
Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity);
groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin);
Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(parentCommunityAdmin.getEmail(), password);
// Bitstreams originate from two different collections, but those collections live in the same community, so
// a community admin should be able to delete them
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
}
public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception {
for (Bitstream bitstream : bitstreams) {
if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID()))
.andReturn().getResponse().getStatus() != SC_OK) {
return false;
}
}
return true;
}
public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception {
for (Bitstream bitstream : bitstreams) {
if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID()))
.andReturn().getResponse().getStatus() != SC_NOT_FOUND) {
return false;
}
}
return true;
}
} }


@@ -8,6 +8,7 @@
package org.dspace.app.rest; package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
@@ -63,22 +64,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
//We expect the content type to be "application/hal+json;charset=UTF-8" //We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType)) .andExpect(content().contentType(contentType))
//Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page
// object // object
.andExpect(jsonPath("$.page.size", is(20))) .andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(4))) .andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.totalPages", is(1)))
.andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$.page.number", is(0)))
//The array of browse index should have a size 4 //The array of browse index should have a size 5
.andExpect(jsonPath("$._embedded.browses", hasSize(4))) .andExpect(jsonPath("$._embedded.browses", hasSize(5)))
//Check that all (and only) the default browse indexes are present //Check that all (and only) the default browse indexes are present
.andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder(
BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.dateIssuedBrowseIndex("asc"),
BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"),
BrowseIndexMatcher.titleBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"),
BrowseIndexMatcher.subjectBrowseIndex("asc") BrowseIndexMatcher.subjectBrowseIndex("asc"),
BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")
))) )))
; ;
} }
@@ -125,6 +127,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
; ;
} }
@Test
public void findBrowseByVocabulary() throws Exception {
//Use srsc as this vocabulary is included by default
//When we call the root endpoint
getClient().perform(get("/api/discover/browses/srsc"))
//The status has to be 200 OK
.andExpect(status().isOk())
//We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType))
//Check that the JSON root matches the expected browse index
.andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")))
;
}
@Test @Test
public void findBrowseBySubject() throws Exception { public void findBrowseBySubject() throws Exception {
//When we call the root endpoint //When we call the root endpoint
@@ -2142,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author" // The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author"))) .andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse // It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true))) .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)))
; ;
} }
@@ -2159,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author" // The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author"))) .andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse // It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true))); .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)));
} }
@Test @Test


@@ -153,6 +153,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums.add(title); metadatums.add(title);
metadatums.add(author); metadatums.add(author);
@@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums.add(issn); metadatums.add(issn);
metadatums.add(volume); metadatums.add(volume);
metadatums.add(issue); metadatums.add(issue);
metadatums.add(publisher);
ImportRecord firstrRecord = new ImportRecord(metadatums); ImportRecord firstrRecord = new ImportRecord(metadatums);
@@ -179,6 +182,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums2.add(title2); metadatums2.add(title2);
metadatums2.add(author2); metadatums2.add(author2);
@@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums2.add(issn2); metadatums2.add(issn2);
metadatums2.add(volume2); metadatums2.add(volume2);
metadatums2.add(issue2); metadatums2.add(issue2);
metadatums2.add(publisher2);
ImportRecord secondRecord = new ImportRecord(metadatums2); ImportRecord secondRecord = new ImportRecord(metadatums2);
records.add(firstrRecord); records.add(firstrRecord);
@@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
return records; return records;
} }
} }


@@ -0,0 +1,677 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.dspace.app.rest.matcher.FacetEntryMatcher;
import org.dspace.app.rest.matcher.FacetValueMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.service.CollectionService;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This class tests the correct inheritance of Discovery configurations for subcommunities and collections.
* To test this thoroughly, a community and collection structure is set up in which different communities and
* collections have custom configurations assigned to them.
*
* The following structure is used:
* - Parent Community 1 - Custom configuration: discovery-parent-community-1
* -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1
* -- Collection 111 - Custom configuration: discovery-collection-1-1-1
* -- Collection 112
* -- Subcommunity 12
* -- Collection 121 - Custom configuration: discovery-collection-1-2-1
* -- Collection 122
* - Parent Community 2
* -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1
* -- Collection 211 - Custom configuration: discovery-collection-2-1-1
* -- Collection 212
* -- Subcommunity 22
* -- Collection 221 - Custom configuration: discovery-collection-2-2-1
* -- Collection 222
*
* Each custom configuration contains a unique index on a unique metadata field, to verify that the correct
* information is indexed and provided for the different search scopes.
*
* Each collection contains one item. In addition, there are two mapped items: one mapped into collections 111
* and 222, and one mapped into collections 122 and 211.
*
* The tests will verify that for each object, the correct facets are provided and that all the necessary fields to
* power these facets are indexed properly.
*
* This file requires the discovery configuration in the following test file:
* src/test/data/dspaceFolder/config/spring/api/test-discovery.xml
*/
public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest {
@Autowired
CollectionService collectionService;
private Community parentCommunity1;
private Community subcommunity11;
private Community subcommunity12;
private Collection collection111;
private Collection collection112;
private Collection collection121;
private Collection collection122;
private Community parentCommunity2;
private Community subcommunity21;
private Community subcommunity22;
private Collection collection211;
private Collection collection212;
private Collection collection221;
private Collection collection222;
@Before
public void setUp() throws Exception {
super.setUp();
context.turnOffAuthorisationSystem();
MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build();
parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
.build();
subcommunity11 = CommunityBuilder
.createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1")
.build();
subcommunity12 = CommunityBuilder
.createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2")
.build();
collection111 = CollectionBuilder
.createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1")
.build();
collection112 = CollectionBuilder
.createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2")
.build();
collection121 = CollectionBuilder
.createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1")
.build();
collection122 = CollectionBuilder
.createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2")
.build();
parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
.build();
subcommunity21 = CommunityBuilder
.createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1")
.build();
subcommunity22 = CommunityBuilder
.createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2")
.build();
collection211 = CollectionBuilder
.createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1")
.build();
collection212 = CollectionBuilder
.createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2")
.build();
collection221 = CollectionBuilder
.createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1")
.build();
collection222 = CollectionBuilder
.createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2")
.build();
Item item111 = ItemBuilder.createItem(context, collection111)
.withMetadata("dc", "contributor", "author", "author-item111")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111")
.withMetadata("dc", "test", "collection111field", "collection111field-item111")
.withMetadata("dc", "test", "collection121field", "collection121field-item111")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111")
.withMetadata("dc", "test", "collection211field", "collection211field-item111")
.withMetadata("dc", "test", "collection221field", "collection221field-item111")
.build();
Item item112 = ItemBuilder.createItem(context, collection112)
.withMetadata("dc", "contributor", "author", "author-item112")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112")
.withMetadata("dc", "test", "collection111field", "collection111field-item112")
.withMetadata("dc", "test", "collection121field", "collection121field-item112")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112")
.withMetadata("dc", "test", "collection211field", "collection211field-item112")
.withMetadata("dc", "test", "collection221field", "collection221field-item112")
.build();
Item item121 = ItemBuilder.createItem(context, collection121)
.withMetadata("dc", "contributor", "author", "author-item121")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121")
.withMetadata("dc", "test", "collection111field", "collection111field-item121")
.withMetadata("dc", "test", "collection121field", "collection121field-item121")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121")
.withMetadata("dc", "test", "collection211field", "collection211field-item121")
.withMetadata("dc", "test", "collection221field", "collection221field-item121")
.build();
Item item122 = ItemBuilder.createItem(context, collection122)
.withMetadata("dc", "contributor", "author", "author-item122")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122")
.withMetadata("dc", "test", "collection111field", "collection111field-item122")
.withMetadata("dc", "test", "collection121field", "collection121field-item122")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122")
.withMetadata("dc", "test", "collection211field", "collection211field-item122")
.withMetadata("dc", "test", "collection221field", "collection221field-item122")
.build();
Item item211 = ItemBuilder.createItem(context, collection211)
.withMetadata("dc", "contributor", "author", "author-item211")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211")
.withMetadata("dc", "test", "collection111field", "collection111field-item211")
.withMetadata("dc", "test", "collection121field", "collection121field-item211")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211")
.withMetadata("dc", "test", "collection211field", "collection211field-item211")
.withMetadata("dc", "test", "collection221field", "collection221field-item211")
.build();
Item item212 = ItemBuilder.createItem(context, collection212)
.withMetadata("dc", "contributor", "author", "author-item212")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212")
.withMetadata("dc", "test", "collection111field", "collection111field-item212")
.withMetadata("dc", "test", "collection121field", "collection121field-item212")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212")
.withMetadata("dc", "test", "collection211field", "collection211field-item212")
.withMetadata("dc", "test", "collection221field", "collection221field-item212")
.build();
Item item221 = ItemBuilder.createItem(context, collection221)
.withMetadata("dc", "contributor", "author", "author-item221")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221")
.withMetadata("dc", "test", "collection111field", "collection111field-item221")
.withMetadata("dc", "test", "collection121field", "collection121field-item221")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221")
.withMetadata("dc", "test", "collection211field", "collection211field-item221")
.withMetadata("dc", "test", "collection221field", "collection221field-item221")
.build();
Item item222 = ItemBuilder.createItem(context, collection222)
.withMetadata("dc", "contributor", "author", "author-item222")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222")
.withMetadata("dc", "test", "collection111field", "collection111field-item222")
.withMetadata("dc", "test", "collection121field", "collection121field-item222")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222")
.withMetadata("dc", "test", "collection211field", "collection211field-item222")
.withMetadata("dc", "test", "collection221field", "collection221field-item222")
.build();
Item mappedItem111222 = ItemBuilder
.createItem(context, collection111)
.withMetadata("dc", "contributor", "author", "author-mappedItem111222")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222")
.withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222")
.withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222")
.withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222")
.withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222")
.build();
Item mappedItem122211 = ItemBuilder
.createItem(context, collection122)
.withMetadata("dc", "contributor", "author", "author-mappedItem122211")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211")
.withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211")
.withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211")
.withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211")
.withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211")
.build();
collectionService.addItem(context, collection222, mappedItem111222);
collectionService.addItem(context, collection211, mappedItem122211);
context.dispatchEvents();
context.restoreAuthSystemState();
}
/**
* Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1.
*/
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item111", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item112", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item121", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem111222",
1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
/**
* Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11.
*/
@Test
public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity11field")
.param("scope", String.valueOf(subcommunity11.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item111", 1),
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item112", 1),
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-mappedItem111222", 1)
)
));
}
/**
* Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection111field", "text")))
);
getClient().perform(get("/api/discover/facets/collection111field")
.param("scope", String.valueOf(collection111.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection111field",
"collection111field-item111", 1),
FacetValueMatcher.matchEntry("collection111field",
"collection111field-mappedItem111222", 1)
)
));
}
/**
* Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited
* correctly for Collection 112.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity11field")
.param("scope", String.valueOf(collection112.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item112", 1)
)
));
}
/**
* Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited
* correctly for Subcommunity 12.
*/
@Test
public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(subcommunity12.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item121", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
/**
* Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection121field", "text")))
);
getClient().perform(get("/api/discover/facets/collection121field")
.param("scope", String.valueOf(collection121.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection121field",
"collection121field-item121", 1)
)
));
}
/**
* Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited
* correctly for Collection 122.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(collection122.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Parent Community 2.
*/
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
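// The facets asserted here (author, subject, date issued, has content in original bundle, entity type) are
// presumably the ones defined by the default discovery configuration, since nothing in the Parent Community 2
// ancestry defines a custom one; Subcommunity 22 and Collection 222 below fall back the same way.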
/**
* Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21.
*/
@Test
public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity21field")
.param("scope", String.valueOf(subcommunity21.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item211", 1),
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item212", 1),
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-mappedItem122211", 1)
)
));
}
/**
* Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection211field", "text")))
);
getClient().perform(get("/api/discover/facets/collection211field")
.param("scope", String.valueOf(collection211.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection211field",
"collection211field-item211", 1),
FacetValueMatcher.matchEntry("collection211field",
"collection211field-mappedItem122211", 1)
)
));
}
/**
* Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited
* correctly for Collection 212.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity21field")
.param("scope", String.valueOf(collection212.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item212", 1)
)
));
}
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Subcommunity 22.
*/
@Test
public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
/**
* Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection221field", "text")))
);
getClient().perform(get("/api/discover/facets/collection221field")
.param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection221field",
"collection221field-item221", 1)
)
));
}
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Collection 222.
*/
@Test
public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
}

View File

@@ -88,7 +88,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT);
@@ -116,6 +116,41 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
}
}
@Test
public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT);
metadataSchemaRest.setPrefix("test.SchemaName");
metadataSchemaRest.setNamespace(TEST_NAMESPACE);
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataSchemaRest.setPrefix("test,SchemaName");
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataSchemaRest.setPrefix("test SchemaName");
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
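// Judging from the three rejected values above, prefix validation rejects dots, commas and whitespace;
// a valid prefix presumably has to be a single token that cannot break the "schema.element.qualifier" syntax.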
@Test
public void createUnauthorizedTest()
throws Exception {
@@ -202,7 +237,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
@@ -214,7 +249,33 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher .andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED)));
}
@Test
public void update_schemaNameShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME_UPDATED);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadataschemas/" + metadataSchema.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME, TEST_NAMESPACE)));
}
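// Taken together with the adjusted update test above, this shows the schema prefix is effectively immutable
// via PUT: only the namespace may change, and any prefix change is answered with 422 Unprocessable Entity.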
@Test

View File

@@ -9,6 +9,7 @@ package org.dspace.app.rest;
import static com.jayway.jsonpath.JsonPath.read;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -49,12 +50,12 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest {
private static final String ELEMENT = "test_element";
private static final String QUALIFIER = "test_qualifier";
private static final String SCOPE_NOTE = "test scope_note";
private static final String ELEMENT_UPDATED = "test_element_updated";
private static final String QUALIFIER_UPDATED = "test_qualifier_updated";
private static final String SCOPE_NOTE_UPDATED = "test scope_note updated";
private MetadataSchema metadataSchema;
@@ -564,6 +565,70 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.andExpect(status().isUnprocessableEntity());
}
@Test
public void findByFieldName_sortByFieldNameASC() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
"http://www.dspace.org/ns/aschema").build();
MetadataField metadataField1 = MetadataFieldBuilder
.createMetadataField(context, schema, "2", null, "AScopeNote").build();
MetadataField metadataField2 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", null, "AScopeNote").build();
MetadataField metadataField3 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", "a", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("query", schema.getName())
.param("sort", "fieldName,ASC"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.metadatafields", contains(
MetadataFieldMatcher.matchMetadataField(metadataField2),
MetadataFieldMatcher.matchMetadataField(metadataField3),
MetadataFieldMatcher.matchMetadataField(metadataField1)
)))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(3)));
}
@Test
public void findByFieldName_sortByFieldNameDESC() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
"http://www.dspace.org/ns/aschema").build();
MetadataField metadataField1 = MetadataFieldBuilder
.createMetadataField(context, schema, "2", null, "AScopeNote").build();
MetadataField metadataField2 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", null, "AScopeNote").build();
MetadataField metadataField3 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", "a", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("query", schema.getName())
.param("sort", "fieldName,DESC"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.metadatafields", contains(
MetadataFieldMatcher.matchMetadataField(metadataField1),
MetadataFieldMatcher.matchMetadataField(metadataField3),
MetadataFieldMatcher.matchMetadataField(metadataField2)
)))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(3)));
}
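// The expected order in both tests suggests that fieldName sorts on the qualified name
// "schema.element[.qualifier]" as a plain string, e.g. ASchema.1 < ASchema.1.a < ASchema.2 ascending.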
@Test
public void createSuccess() throws Exception {
@@ -575,7 +640,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
@@ -606,7 +672,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
Integer id = null;
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
null), nullValue());
id = read(
getClient(authToken)
@@ -641,7 +708,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
@@ -689,6 +757,94 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.andExpect(status().isUnauthorized());
}
@Test
public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement("testElement.ForCreate");
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String authToken = getAuthToken(admin.getEmail(), password);
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setElement("testElement,ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setElement("testElement ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
@Test
public void createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier("testQualifier.ForCreate");
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String authToken = getAuthToken(admin.getEmail(), password);
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setQualifier("testQualifier,ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setQualifier("testQualifier ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
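// Mirroring the schema prefix rules, element and qualifier values containing '.', ',' or whitespace are
// rejected on create, presumably because they would corrupt the dotted "schema.element.qualifier" notation.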
@Test
public void createUnauthorizedEPersonNoAdminRights() throws Exception {
@@ -832,31 +988,81 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isOk());
}
@Test
public void update_elementShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT_UPDATED);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
@Test
public void update_qualifierShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
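// Net effect of the update_* tests: element and qualifier are immutable via PUT (any change yields 422),
// while the scope note remains updatable, as the plain update test above (status 200) shows.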
@Test
public void update_checkNotUpdatedInIndex() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
@@ -885,27 +1091,27 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.perform(put("/api/core/metadatafields/" + metadataField.getID()) .perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType)) .contentType(contentType))
.andExpect(status().isOk()); .andExpect(status().isUnprocessableEntity());
// new metadata field found in index // new metadata field not found in index
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("schema", metadataSchema.getName()) .param("schema", metadataSchema.getName())
.param("element", ELEMENT_UPDATED) .param("element", ELEMENT_UPDATED)
.param("qualifier", QUALIFIER_UPDATED)) .param("qualifier", QUALIFIER_UPDATED))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( .andExpect(jsonPath("$.page.totalElements", is(0)));
MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
ELEMENT_UPDATED, QUALIFIER_UPDATED))
))
.andExpect(jsonPath("$.page.totalElements", is(1)));
// original metadata field not found in index // original metadata field found in index
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("schema", metadataSchema.getName()) .param("schema", metadataSchema.getName())
.param("element", metadataField.getElement()) .param("element", metadataField.getElement())
.param("qualifier", metadataField.getQualifier())) .param("qualifier", metadataField.getQualifier()))
.andExpect(status().isOk()) .andExpect(status().isOk())
.andExpect(jsonPath("$.page.totalElements", is(0))); .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
ELEMENT, QUALIFIER))
))
.andExpect(jsonPath("$.page.totalElements", is(1)));
} }
@Test

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess;
import static org.dspace.content.ProcessStatus.SCHEDULED;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
@@ -220,22 +222,35 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
@Test
public void getProcessFiles() throws Exception {
context.setCurrentUser(eperson);
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
}
Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile");
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.files[0].name", is("test.csv")))
.andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString())))
.andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" +
"[0].value", is("inputfile")));
getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content"))
.andExpect(status().isOk());
// also the user that triggered the process should be able to access the process' files
String epersonToken = getAuthToken(eperson.getEmail(), password);
getClient(epersonToken)
.perform(get("/api/system/processes/" + newProcess.getID() + "/files"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.files[0].name", is("test.csv")))
.andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString())))
.andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" +
"[0].value", is("inputfile")));
getClient(epersonToken)
.perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content"))
.andExpect(status().isOk());
}
@Test
@@ -243,25 +258,34 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
}
Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile");
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv")))
.andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString())))
.andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" +
"[0].value", is("inputfile")));
// also the user that triggered the process should be able to access the process' files
String epersonToken = getAuthToken(eperson.getEmail(), password);
getClient(epersonToken)
.perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv")))
.andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString())))
.andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" +
"[0].value", is("inputfile")));
}
@Test
public void getProcessFilesTypes() throws Exception {
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
}
List<String> fileTypesToCheck = new LinkedList<>();
@@ -269,12 +293,18 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes"))
.andExpect(status().isOk())
.andExpect(jsonPath("$", ProcessFileTypesMatcher
.matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck)));
// also the user that triggered the process should be able to access the process' files
String epersonToken = getAuthToken(eperson.getEmail(), password);
getClient(epersonToken)
.perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes"))
.andExpect(status().isOk())
.andExpect(jsonPath("$", ProcessFileTypesMatcher
.matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck)));
}
@Test
@@ -783,27 +813,68 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
.andExpect(status().isBadRequest());
}
@Test
public void testFindByCurrentUser() throws Exception {
Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
.withStartAndEndTime("10/01/1990", "20/01/1990")
.build();
ProcessBuilder.createProcess(context, admin, "mock-script", parameters)
.withStartAndEndTime("11/01/1990", "19/01/1990")
.build();
Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
.withStartAndEndTime("12/01/1990", "18/01/1990")
.build();
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/system/processes/search/own"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.processes", contains(
matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED),
matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED))))
.andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2))));
}
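// The /search/own results contain only the two processes created by eperson (the admin-owned process is
// filtered out) and appear ordered by start time descending: process3 (12/01/1990) before process1 (10/01/1990).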
@Test
public void getProcessOutput() throws Exception {
context.setCurrentUser(eperson);
Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
.withStartAndEndTime("10/01/1990", "20/01/1990")
.build();
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO);
}
processService.createLogBitstream(context, process1);
List<String> fileTypesToCheck = new LinkedList<>();
fileTypesToCheck.add("inputfile");
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.name",
is(process1.getName() + process1.getID() + ".log")))
.andExpect(jsonPath("$.type", is("bitstream")))
.andExpect(jsonPath("$.metadata['dc.title'][0].value",
is(process1.getName() + process1.getID() + ".log")))
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value",
is("script_output")));
String epersonToken = getAuthToken(eperson.getEmail(), password);
getClient(epersonToken)
.perform(get("/api/system/processes/" + process1.getID() + "/output"))
.andExpect(status().isOk())
.andExpect(jsonPath("$.name",
is(process1.getName() + process1.getID() + ".log")))
.andExpect(jsonPath("$.type", is("bitstream")))
.andExpect(jsonPath("$.metadata['dc.title'][0].value",
is(process1.getName() + process1.getID() + ".log")))
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value",
is("script_output")));
} }
}

View File

@@ -0,0 +1,213 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Integration tests for {@link PubmedImportMetadataSourceServiceImpl}
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest {
@Autowired
private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl;
@Autowired
private LiveImportClientImpl liveImportClientImpl;
@Test
public void pubmedImportMetadataGetRecordsTest() throws Exception {
context.turnOffAuthorisationSystem();
CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml");
InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) {
liveImportClientImpl.setHttpClient(httpClient);
CloseableHttpResponse fetchResponse = mockResponse(
IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
CloseableHttpResponse searchResponse = mockResponse(
IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
context.restoreAuthSystemState();
ArrayList<ImportRecord> collection2match = getRecords();
Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
assertEquals(1, recordsImported.size());
matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
} finally {
liveImportClientImpl.setHttpClient(originalHttpClient);
}
}
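// Mocking pattern used by both tests: the HttpClient inside LiveImportClientImpl is temporarily replaced
// with a Mockito mock whose consecutive execute() calls return the canned responses loaded from the
// classpath XML fixtures, so no live PubMed/Entrez request is made; the original client is restored
// in the finally block.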
@Test
public void pubmedImportMetadataGetRecords2Test() throws Exception {
context.turnOffAuthorisationSystem();
CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml");
InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) {
liveImportClientImpl.setHttpClient(httpClient);
CloseableHttpResponse fetchResponse = mockResponse(
IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
CloseableHttpResponse searchResponse = mockResponse(
IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
context.restoreAuthSystemState();
ArrayList<ImportRecord> collection2match = getRecords2();
Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
assertEquals(1, recordsImported.size());
matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
} finally {
liveImportClientImpl.setHttpClient(originalHttpClient);
}
}
private ArrayList<ImportRecord> getRecords() {
ArrayList<ImportRecord> records = new ArrayList<>();
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
//define first record
MetadatumDTO title = createMetadatumDTO("dc","title", null,
"Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review.");
MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize"
+ " the main strategies for teaching clinical reasoning described in the literature in the context of"
+ " advanced clinical practice and promote new areas of research to improve the pedagogical approach"
+ " to clinical reasoning in Advanced Practice Nursing.");
MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and"
+ " clinical thinking are essential elements in the advanced nursing clinical practice decision-making"
+ " process. The quality improvement of care is related to the development of those skills."
+ " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical"
+ " reasoning in advanced clinical practice.");
MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was"
+ " conducted using the framework developed by Arksey and O'Malley as a research strategy."
+ " Consistent with the nature of scoping reviews, a study protocol has been established.");
MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and"
+ " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary"
+ " revision studies, published in biomedical databases, were selected, including qualitative ones."
+ " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID."
+ " Three authors independently evaluated the articles for titles, abstracts, and full text.");
MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined,"
+ " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility,"
+ " and 27 were included in the scoping review. The results that emerged from the review were"
+ " interpreted and grouped into three macro strategies (simulations-based education, art and visual"
+ " thinking, and other learning approaches) and nineteen educational interventions.");
MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different"
+ " strategies, the simulations are the most used. Despite this, our scoping review reveals that is"
+ " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic"
+ " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to"
+ " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to"
+ " acquire an adequate level of judgment and critical thinking. Therefore, it will be"
+ " necessary to relate teaching methodologies with the skills developed.");
MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638");
MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia");
MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana");
MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola");
MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina");
MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia");
MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica");
MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02");
MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing");
MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning");
MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking");
MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies");
MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education");
MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology");
metadatums.add(title);
metadatums.add(description1);
metadatums.add(description2);
metadatums.add(description3);
metadatums.add(description4);
metadatums.add(description5);
metadatums.add(description6);
metadatums.add(identifierOther);
metadatums.add(author1);
metadatums.add(author2);
metadatums.add(author3);
metadatums.add(author4);
metadatums.add(author5);
metadatums.add(author6);
metadatums.add(date);
metadatums.add(language);
metadatums.add(subject1);
metadatums.add(subject2);
metadatums.add(subject3);
metadatums.add(subject4);
metadatums.add(subject5);
metadatums.add(subject6);
ImportRecord record = new ImportRecord(metadatums);
records.add(record);
return records;
}
private ArrayList<ImportRecord> getRecords2() {
ArrayList<ImportRecord> records = new ArrayList<>();
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
//define first record
MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez.");
MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely"
+ " used interfaces for the retrieval of information from biological databases is the NCBI Entrez"
+ " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between"
+ " the individual entries found in numerous public databases. The existence of such natural"
+ " connections, mostly biological in nature, argued for the development of a method through which"
+ " all the information about a particular biological entity could be found without having to"
+ " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based"
+ " searches, illustrating the types of information that can be retrieved through the Entrez system."
+ " An alternate protocol builds upon the first basic protocol, using additional,"
+ " built-in features of the Entrez system, and providing alternative ways to issue the initial query."
+ " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure"
+ " visualization tool, is also discussed.");
MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942");
MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen");
MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D");
MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10");
MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
metadatums.add(title);
metadatums.add(description);
metadatums.add(identifierOther);
metadatums.add(author1);
metadatums.add(author2);
metadatums.add(date);
metadatums.add(language);
ImportRecord record = new ImportRecord(metadatums);
records.add(record);
return records;
}
}

View File

@@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.Matchers.is;
 import static org.junit.Assert.assertThat;
@@ -44,6 +45,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.builder.CollectionBuilder;
 import org.dspace.builder.CommunityBuilder;
+import org.dspace.builder.EPersonBuilder;
 import org.dspace.builder.GroupBuilder;
 import org.dspace.builder.ItemBuilder;
 import org.dspace.builder.ProcessBuilder;
@@ -53,6 +55,7 @@ import org.dspace.content.Community;
 import org.dspace.content.Item;
 import org.dspace.content.ProcessStatus;
 import org.dspace.content.service.BitstreamService;
+import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.scripts.DSpaceCommandLineParameter;
 import org.dspace.scripts.Process;
@@ -123,12 +126,72 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
 
     @Test
-    public void findAllScriptsUnauthorizedTest() throws Exception {
+    public void findAllScriptsGenericLoggedInUserTest() throws Exception {
         String token = getAuthToken(eperson.getEmail(), password);
 
         getClient(token).perform(get("/api/system/scripts"))
-                        .andExpect(status().isForbidden());
+                        .andExpect(status().isOk())
+                        .andExpect(jsonPath("$.page.totalElements", is(0)));
+    }
+
+    @Test
+    public void findAllScriptsAnonymousUserTest() throws Exception {
+        // this should be changed once we allow anonymous users to execute some scripts
+        getClient().perform(get("/api/system/scripts"))
+                   .andExpect(status().isUnauthorized());
+    }
+
+    @Test
+    public void findAllScriptsLocalAdminsTest() throws Exception {
+        context.turnOffAuthorisationSystem();
+        EPerson comAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("comAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson colAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("colAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson itemAdmin = EPersonBuilder.createEPerson(context)
+                                          .withEmail("itemAdmin@example.com")
+                                          .withPassword(password).build();
+        Community community = CommunityBuilder.createCommunity(context)
+                                              .withName("Community")
+                                              .withAdminGroup(comAdmin)
+                                              .build();
+        Collection collection = CollectionBuilder.createCollection(context, community)
+                                                 .withName("Collection")
+                                                 .withAdminGroup(colAdmin)
+                                                 .build();
+        ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
+                   .withTitle("Test item to curate").build();
+        context.restoreAuthSystemState();
+        ScriptConfiguration curateScriptConfiguration =
+                scriptConfigurations.stream()
+                                    .filter(scriptConfiguration -> scriptConfiguration.getName().equals("curate"))
+                                    .findAny().get();
+        // the local admins have access to at least the curate script,
+        // but not to the process-cleaner script
+        String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
+        getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100"))
+                                .andExpect(status().isOk())
+                                .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
+                                        ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
+                                                curateScriptConfiguration.getDescription()))))
+                                .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
+        String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
+        getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100"))
+                                .andExpect(status().isOk())
+                                .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
+                                        ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
+                                                curateScriptConfiguration.getDescription()))))
+                                .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
+        String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
+        getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100"))
+                                 .andExpect(status().isOk())
+                                 .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
+                                         ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
+                                                 curateScriptConfiguration.getDescription()))))
+                                 .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
     }
 
     @Test
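The comment in the hunk above says local admins see at least the curate script but not the process-cleaner script, yet only the positive case is asserted. A minimal negative assertion could look like the following sketch (hypothetical, not part of this change; it reuses comAdminToken from the test above and assumes the admin-only script is registered under the name "process-cleaner"):

    // Hypothetical follow-up assertion: the admin-only script stays hidden from local admins.
    getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100"))
                            .andExpect(status().isOk())
                            .andExpect(jsonPath("$._embedded.scripts[*].name",
                                    Matchers.not(Matchers.hasItem("process-cleaner"))));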
@@ -222,6 +285,63 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
         ));
     }
 
+    @Test
+    public void findOneScriptByNameLocalAdminsTest() throws Exception {
+        context.turnOffAuthorisationSystem();
+        EPerson comAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("comAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson colAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("colAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson itemAdmin = EPersonBuilder.createEPerson(context)
+                                          .withEmail("itemAdmin@example.com")
+                                          .withPassword(password).build();
+        Community community = CommunityBuilder.createCommunity(context)
+                                              .withName("Community")
+                                              .withAdminGroup(comAdmin)
+                                              .build();
+        Collection collection = CollectionBuilder.createCollection(context, community)
+                                                 .withName("Collection")
+                                                 .withAdminGroup(colAdmin)
+                                                 .build();
+        ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
+                   .withTitle("Test item to curate").build();
+        context.restoreAuthSystemState();
+        ScriptConfiguration curateScriptConfiguration =
+                scriptConfigurations.stream()
+                                    .filter(scriptConfiguration -> scriptConfiguration.getName().equals("curate"))
+                                    .findAny().get();
+        String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
+        String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
+        String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
+        getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
+                                .andExpect(status().isOk())
+                                .andExpect(jsonPath("$", ScriptMatcher.matchScript(
+                                        curateScriptConfiguration.getName(),
+                                        curateScriptConfiguration.getDescription())));
+        getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
+                                .andExpect(status().isOk())
+                                .andExpect(jsonPath("$", ScriptMatcher.matchScript(
+                                        curateScriptConfiguration.getName(),
+                                        curateScriptConfiguration.getDescription())));
+        getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
+                                 .andExpect(status().isOk())
+                                 .andExpect(jsonPath("$", ScriptMatcher.matchScript(
+                                         curateScriptConfiguration.getName(),
+                                         curateScriptConfiguration.getDescription())));
+    }
+
+    @Test
+    public void findOneScriptByNameNotAuthenticatedTest() throws Exception {
+        getClient().perform(get("/api/system/scripts/mock-script"))
+                   .andExpect(status().isUnauthorized());
+    }
+
     @Test
     public void findOneScriptByNameTestAccessDenied() throws Exception {
         String token = getAuthToken(eperson.getEmail(), password);
@@ -235,15 +355,51 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
         String token = getAuthToken(admin.getEmail(), password);
 
         getClient(token).perform(get("/api/system/scripts/mock-script-invalid"))
-                        .andExpect(status().isBadRequest());
+                        .andExpect(status().isNotFound());
     }
 
+    /**
+     * This test creates a basic structure of communities, collections and items with some local admins at each
+     * level and verifies that neither the local admins nor generic users can run scripts reserved to
+     * administrators (i.e. default ones that don't override the default
+     * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation).
+     */
     @Test
     public void postProcessNonAdminAuthorizeException() throws Exception {
-        String token = getAuthToken(eperson.getEmail(), password);
+        context.turnOffAuthorisationSystem();
+        EPerson comAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("comAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson colAdmin = EPersonBuilder.createEPerson(context)
+                                         .withEmail("colAdmin@example.com")
+                                         .withPassword(password).build();
+        EPerson itemAdmin = EPersonBuilder.createEPerson(context)
+                                          .withEmail("itemAdmin@example.com")
+                                          .withPassword(password).build();
+        Community community = CommunityBuilder.createCommunity(context)
+                                              .withName("Community")
+                                              .withAdminGroup(comAdmin)
+                                              .build();
+        Collection collection = CollectionBuilder.createCollection(context, community)
+                                                 .withName("Collection")
+                                                 .withAdminGroup(colAdmin)
+                                                 .build();
+        ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
+                   .withTitle("Test item to curate").build();
+        context.restoreAuthSystemState();
+        String token = getAuthToken(eperson.getEmail(), password);
+        // authenticate as each local admin, so every request below runs as that user
+        String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
+        String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
+        String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
         getClient(token).perform(multipart("/api/system/scripts/mock-script/processes"))
                         .andExpect(status().isForbidden());
+        getClient(comAdminToken).perform(multipart("/api/system/scripts/mock-script/processes"))
+                                .andExpect(status().isForbidden());
+        getClient(colAdminToken).perform(multipart("/api/system/scripts/mock-script/processes"))
+                                .andExpect(status().isForbidden());
+        getClient(itemAdminToken).perform(multipart("/api/system/scripts/mock-script/processes"))
+                                 .andExpect(status().isForbidden());
     }
 
     @Test
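The javadoc above points at overriding {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} as the way to open a script to non-administrators. A minimal sketch of such an override, assuming a hypothetical ExampleScript and the two-argument signature referenced in that javadoc:

    // Hypothetical configuration: any authenticated user may run the script.
    public class ExampleScriptConfiguration<T extends ExampleScript> extends ScriptConfiguration<T> {

        @Override
        public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
            // the default implementation reserves execution to repository administrators;
            // this override only requires a logged-in user
            return context.getCurrentUser() != null;
        }
    }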
@@ -277,16 +433,6 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
 
     @Test
     public void postProcessAdminNoOptionsFailedStatus() throws Exception {
-//        List<ParameterValueRest> list = new LinkedList<>();
-//
-//        ParameterValueRest parameterValueRest = new ParameterValueRest();
-//        parameterValueRest.setName("-z");
-//        parameterValueRest.setValue("test");
-//        ParameterValueRest parameterValueRest1 = new ParameterValueRest();
-//        parameterValueRest1.setName("-q");
-//        list.add(parameterValueRest);
-//        list.add(parameterValueRest1);
 
         LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
         parameters.add(new DSpaceCommandLineParameter("-z", "test"));
@@ -322,7 +468,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
         String token = getAuthToken(admin.getEmail(), password);
 
         getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes"))
-                        .andExpect(status().isBadRequest());
+                        .andExpect(status().isNotFound());
     }
 
     @Test
@@ -434,12 +580,19 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
         }
     }
 
     @Test
     public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception {
 
         String token = getAuthToken(admin.getEmail(), password);
 
+        getClient(token)
+                .perform(post("/api/system/scripts/mock-script/processes"))
+                .andExpect(status().isBadRequest());
+
         getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes"))
-                        .andExpect(status().isBadRequest());
+                        .andExpect(status().isNotFound());
     }
 
     @Test
@@ -601,9 +754,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
         }
     }
 
     @After
     public void destroy() throws Exception {
+        context.turnOffAuthorisationSystem();
         CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> {
             try {
                 processService.delete(context, process);
@@ -611,6 +764,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
                 throw new RuntimeException(e);
             }
         });
+        context.restoreAuthSystemState();
         super.destroy();
     }

View File

@@ -8,6 +8,9 @@
 package org.dspace.app.rest.matcher;
 
 import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
+import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
+import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
+import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
 import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL;
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.contains;
@@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is;
 import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase;
 
 import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
 
 /**
  * Utility class to construct a Matcher for a browse index
@@ -31,7 +33,8 @@ public class BrowseIndexMatcher {
     public static Matcher<? super Object> subjectBrowseIndex(final String order) {
         return allOf(
             hasJsonPath("$.metadata", contains("dc.subject.*")),
-            hasJsonPath("$.metadataBrowse", Matchers.is(true)),
+            hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
+            hasJsonPath("$.type", equalToIgnoringCase("browse")),
             hasJsonPath("$.dataType", equalToIgnoringCase("text")),
             hasJsonPath("$.order", equalToIgnoringCase(order)),
             hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -44,7 +47,8 @@ public class BrowseIndexMatcher {
     public static Matcher<? super Object> titleBrowseIndex(final String order) {
         return allOf(
             hasJsonPath("$.metadata", contains("dc.title")),
-            hasJsonPath("$.metadataBrowse", Matchers.is(false)),
+            hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
+            hasJsonPath("$.type", equalToIgnoringCase("browse")),
             hasJsonPath("$.dataType", equalToIgnoringCase("title")),
             hasJsonPath("$.order", equalToIgnoringCase(order)),
             hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -56,7 +60,8 @@ public class BrowseIndexMatcher {
     public static Matcher<? super Object> contributorBrowseIndex(final String order) {
         return allOf(
             hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")),
-            hasJsonPath("$.metadataBrowse", Matchers.is(true)),
+            hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
+            hasJsonPath("$.type", equalToIgnoringCase("browse")),
             hasJsonPath("$.dataType", equalToIgnoringCase("text")),
             hasJsonPath("$.order", equalToIgnoringCase(order)),
             hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -69,7 +74,8 @@ public class BrowseIndexMatcher {
     public static Matcher<? super Object> dateIssuedBrowseIndex(final String order) {
         return allOf(
             hasJsonPath("$.metadata", contains("dc.date.issued")),
-            hasJsonPath("$.metadataBrowse", Matchers.is(false)),
+            hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
+            hasJsonPath("$.type", equalToIgnoringCase("browse")),
             hasJsonPath("$.dataType", equalToIgnoringCase("date")),
             hasJsonPath("$.order", equalToIgnoringCase(order)),
             hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -77,4 +83,22 @@ public class BrowseIndexMatcher {
             hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items"))
         );
     }
+
+    public static Matcher<? super Object> hierarchicalBrowseIndex(final String vocabulary) {
+        return allOf(
+            hasJsonPath("$.metadata", contains("dc.subject")),
+            hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)),
+            hasJsonPath("$.type", equalToIgnoringCase("browse")),
+            hasJsonPath("$.facetType", equalToIgnoringCase("subject")),
+            hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)),
+            hasJsonPath("$._links.vocabulary.href",
+                        is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))),
+            hasJsonPath("$._links.items.href",
+                        is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))),
+            hasJsonPath("$._links.entries.href",
+                        is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))),
+            hasJsonPath("$._links.self.href",
+                        is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary)))
+        );
+    }
 }
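For context, the new hierarchicalBrowseIndex matcher above would typically be used in an integration test along these lines (a hypothetical sketch; the vocabulary name "srsc" is only assumed for illustration):

    // Hypothetical usage: assert that a vocabulary-backed browse index is exposed correctly.
    getClient().perform(get("/api/discover/browses/srsc"))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")));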

View File

@@ -110,6 +110,17 @@ public class FacetEntryMatcher {
         );
     }
 
+    public static Matcher<? super Object> matchFacet(boolean hasNext, String name, String facetType) {
+        return allOf(
+            hasJsonPath("$.name", is(name)),
+            hasJsonPath("$.facetType", is(facetType)),
+            hasJsonPath("$.facetLimit", any(Integer.class)),
+            hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)),
+            hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name))
+        );
+    }
+
     /**
      * Check that a facet over the dc.type exists and match the default configuration
      *

View File

@@ -60,6 +60,16 @@ public class FacetValueMatcher {
         );
     }
 
+    public static Matcher<? super Object> matchEntry(String facet, String label, int count) {
+        return allOf(
+            hasJsonPath("$.label", is(label)),
+            hasJsonPath("$.type", is("discover")),
+            hasJsonPath("$.count", is(count)),
+            hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")),
+            hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals"))
+        );
+    }
+
     public static Matcher<? super Object> entrySubjectWithAuthority(String label, String authority, int count) {
         return allOf(

View File

@@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest {
         sortConfiguration.setSortFields(listSortField);
 
+        sortConfiguration.setDefaultSortField(defaultSort);
+
         discoveryConfiguration.setSearchSortConfiguration(sortConfiguration);
 
         DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet();
@@ -167,6 +169,16 @@ public class RestDiscoverQueryBuilderTest {
                 page.getOffset(), "SCORE", "ASC");
     }
 
+    @Test
+    public void testSortByDefaultSortField() throws Exception {
+        page = PageRequest.of(2, 10);
+        restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page);
+
+        verify(discoverQueryBuilder, times(1))
+                .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(),
+                        page.getPageSize(), page.getOffset(), null, null);
+    }
+
     @Test(expected = DSpaceBadRequestException.class)
     public void testCatchIllegalArgumentException() throws Exception {
         when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(),

View File

@@ -11,7 +11,6 @@ import java.io.InputStream;
 
 import org.apache.commons.cli.Options;
 import org.dspace.app.rest.converter.ScriptConverter;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
 
 /**
@@ -28,10 +27,6 @@ public class TypeConversionTestScriptConfiguration<T extends TypeConversionTestS
     }
 
-    public boolean isAllowedToExecute(final Context context) {
-        return true;
-    }
-
     public Options getOptions() {
 
         Options options = new Options();

Some files were not shown because too many files have changed in this diff.