diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9..91dcfb5dfe 100644 --- a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. */ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a..fb228e7041 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import 
org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. */ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d..aa76c09c0a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the 
current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java index 4e350562bc..4f2a225d3a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -9,7 +9,6 @@ package org.dspace.app.bulkedit; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c..ff83c3ecb2 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import 
org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public class HarvestScriptConfiguration extends ScriptConfigu this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27..b37df5f5ea 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public 
boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index ac9db76051..b32de11f7a 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -23,6 +23,7 @@ import java.util.UUID; import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean zip = false; protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid = false; protected boolean help = false; protected File workDir = null; protected File workFile = null; @@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - FileUtils.deleteDirectory(workDir); - if (remoteUrl && workFile != null && workFile.exists()) { + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL 
and it still exists + if (workFile != null && workFile.exists()) { workFile.delete(); } } @@ -322,14 +332,23 @@ public class ItemImport extends DSpaceRunnable { */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); if (!remoteUrl) { // manage zip via upload optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); } else { // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } - if (optionalFileStream.isPresent()) { + + if (validationFileStream.isPresent()) { + // validate zip file + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); @@ -337,10 +356,32 @@ public class ItemImport extends DSpaceRunnable { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. 
The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } + } + /** * Read the mapfile * @param context diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 1a71a8c4c0..98d2469b71 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; import java.io.InputStream; import java.net.URL; import java.sql.SQLException; @@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport { // If this is a zip archive, unzip it first if (zip) { if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip( @@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport { // manage zip via remote url Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); 
+ } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } } } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index 2d304d8a7d..3f2675ea58 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - 
public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee..867e684db8 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git 
a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061..067c76cce8 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 2120848358..0f144fd69f 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -105,6 +108,13 @@ public class SubmissionConfigReader { */ private SubmissionConfig lastSubmissionConfig = null; + /** + * Collection Service instance, 
needed to interact with collection's + * stored data + */ + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -152,6 +162,9 @@ public class SubmissionConfigReader { } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -287,7 +300,7 @@ public class SubmissionConfigReader { * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. */ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -334,18 +347,23 @@ public class SubmissionConfigReader { * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. 
*/ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -355,7 +373,17 @@ public class SubmissionConfigReader { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } @@ -635,4 +663,4 @@ public class SubmissionConfigReader { } return results; } -} +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 8058caf669..fc438c234c 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -43,6 +43,7 @@ import 
org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -755,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 14a7ff5c82..e0a94833d7 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -514,6 +514,15 @@ public interface AuthorizeService { */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. 
+ * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 8d065c21ce..6c38c8dd66 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ import org.dspace.sort.SortOption; * This class holds all the information about a specifically configured * BrowseIndex. It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. 
Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -102,7 +104,7 @@ public final class BrowseIndex { * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { try { number = -1; tableBaseName = baseName; diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index e02367f6eb..f99aab852b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ddfd38694f..5b70cc4ec0 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1047,4 +1047,24 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i return (int) resp.getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws 
SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index a290cb0d99..f86b6690ad 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.orcid.OrcidHistory; import org.dspace.orcid.OrcidQueue; @@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. 
+ // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. + DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index f25e2c4646..ec8f8769be 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader private SubmissionConfigReader itemSubmissionConfigReader; @@ -87,6 +93,8 @@ public final class 
ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); return ma.getParentChoice(authorityName, vocabularyId, locale); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex 
vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 0000000000..bf8194dbd5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.browse.BrowseIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex extends BrowseIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + 
} + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java index eb34de29c1..a9fd24e947 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java @@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.Choice; import org.dspace.content.authority.ChoiceAuthority; import org.dspace.content.authority.Choices; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; /** * Broker for ChoiceAuthority plugins, and for other information configured @@ -220,4 +221,7 @@ public interface ChoiceAuthorityService { * @return the parent Choice object if any */ public Choice getParentChoice(String authorityName, String vocabularyId, String locale); + + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab); + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index 69bac319c6..95ec40c7a5 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -14,6 +14,7 @@ import java.util.List; import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; @@ -97,4 +98,26 @@ public interface ProcessDAO extends GenericDAO { List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + /** + * Returns a list of all Process objects in the database by the given 
user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which are related to the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 23ce6ce381..d719b5006c 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -24,6 +24,7 @@ import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; @@ -168,6 +169,33 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro return list(context, criteriaQuery, false, Process.class, -1, -1); } + @Override + public List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + 
criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID))); + criteriaQuery.orderBy(orderList); + + return list(context, criteriaQuery, false, Process.class, limit, offset); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index a5b2b7d8d8..9ded79fada 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -455,4 +455,18 @@ public interface CollectionService public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; + /** + * Returns a list of all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved.
+ * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768..2587e6b025 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public class CurationScriptConfiguration extends ScriptConfi } /** - * Only admin can run Curation script via the scripts and processes endpoints. - * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. 
+ * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba..8707b733a6 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import 
org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public class IndexDiscoveryScriptConfiguration extends Sc return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index f14ca124f4..60bf52836b 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,9 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.utils.DiscoverQueryBuilder; @@ -73,35 +76,80 @@ public class SearchUtils { searchService = null; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. 
+ * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. + * @param context + * the database context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * + * + * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -113,6 +161,18 @@ public class SearchUtils { return configurationService.getDiscoveryConfiguration(configurationName); } + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ + public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context 
context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } + public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); return manager @@ -127,47 +187,55 @@ public class SearchUtils { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * + * @param context the database context * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Context context, Item item) + throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item + * + * @param context * @param witem a workflow item * @return a list 
of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 52e0043ff4..7aece5acf3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,10 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + // Add _keyword and _filter fields which are necessary to support filtering and faceting + // for the file names + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if ((description != null) && 
!description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + // Add _keyword and _filter fields which are necessary to support filtering and + // faceting for the descriptions + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", + description); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", + description); } } } @@ -65,4 +75,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { } } } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index c02c83ece6..6cb93e2993 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,23 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,9 +33,18 @@ import org.dspace.services.factory.DSpaceServicesFactory; */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private 
Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. + */ + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); + public Map getMap() { return map; } @@ -41,25 +61,98 @@ public class DiscoveryConfigurationService { this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. 
+ */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } - return getDiscoveryConfiguration(name); } - public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. 
+ */ + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { + // Fall back to default configuration + if (dso == null) { + return getDiscoveryConfiguration(null, true); + } + + // Attempt to retrieve cached configuration by UUID + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); + } + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); + } + + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } + + return configuration; + } + + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { + return getDiscoveryConfiguration(name, true); + } + + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. 
+ * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -67,12 +160,23 @@ public class DiscoveryConfigurationService { return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, - final IndexableObject dso) { + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(context, indexableObject); } } @@ -92,13 +196,25 @@ public class DiscoveryConfigurationService { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { 
+ List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName( - DiscoveryConfigurationService.class - .getName(), - DiscoveryConfigurationService.class); + DiscoveryConfigurationService.class + .getName(), + DiscoveryConfigurationService.class); for (String key : mainService.getMap().keySet()) { System.out.println(key); @@ -126,7 +242,7 @@ public class DiscoveryConfigurationService { System.out.println("Recent submissions configuration:"); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration - .getRecentSubmissionConfiguration(); + .getRecentSubmissionConfiguration(); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax()); diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index e251d1bc51..cd1a4eecb8 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -9,6 +9,7 @@ package org.dspace.discovery.configuration; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -22,6 +23,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * 
Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -30,6 +36,14 @@ public class DiscoverySortConfiguration { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index c2bacfe502..817be7848d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + 
(WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index fc024cc524..005f9b4247 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -160,7 +160,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index 518a8ff145..bef44326fe 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -64,6 +64,7 @@ public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl implements MetadataContributor LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) 
{ dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size()) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); - values.add(metadataFieldMapping.toDCValue(field, formatter.format(date))); - break; + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. 
@@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor implements MetadataContributor } j++; } - if (dcDate == null) { + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); + } else { log.info( "Failed parsing " + dateString + ", check " + "the configured dataformats in config/spring/api/pubmed-integration.xml"); diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java index 1a657343c0..88a1033eca 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script configuration for {@link OrcidBulkPush}. 
@@ -24,20 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index 33fea75add..2e14aeaa36 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -129,6 +129,11 @@ public class ProcessServiceImpl implements ProcessService { return processes; } + @Override + public List findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException { + return processDAO.findByUser(context, eperson, limit, offset); + } + @Override public void start(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.RUNNING); @@ -311,6 +316,11 @@ public class ProcessServiceImpl implements ProcessService { return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); } + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + return processDAO.countByUser(context, user); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java 
b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a51..abb700cb10 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public class ScriptServiceImpl implements ScriptService { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) .sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f44..642409a924 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -7,17 +7,28 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default script are available only to repository administrators script that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. 
*/ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,14 +81,23 @@ public abstract class ScriptConfiguration implements B * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } /** * The getter for the options of the Script diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index ce6a173b0e..c6fc248881 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -255,4 +255,26 @@ public interface ProcessService { */ List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + + /** + * Returns a list of all Process objects in the database by the given user. 
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cb..7d1015c8e2 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ import org.springframework.beans.factory.annotation.Autowired; public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public class 
RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; @Override - public boolean isAllowedToExecute(Context context) { + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index 52685b563d..dd61fab967 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import 
org.springframework.beans.factory.annotation.Autowired; /** * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 9be443f5ea..efbbeedde0 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -122,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! 
+org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index 411e8de4df..08ae3af4ae 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.nio.file.Files; @@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.flywaydb.core.internal.util.ExceptionUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,6 +48,7 @@ import org.junit.Test; public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; private static final String publicationTitle = "A Tale of Two Cities"; private static final String personTitle = "Person Test"; @@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private Collection collection; private Path tempDir; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test @@ -254,6 +262,23 @@ public class ItemImportCLIIT 
extends AbstractIntegrationTestWithDatabase { checkRelationship(); } + @Test + public void importItemByZipSafInvalidMimetype() throws Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + @Test public void resumeImportItemBySafWithMetadataOnly() throws Exception { // create simple SAF diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index ff1083d318..b20515017a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -162,8 +162,8 @@ public abstract class AbstractDSpaceObjectBuilder return (B) this; } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. 
+ * If another ADMIN policy is in place for an eperson it will be replaced * * @param dso * the DSpaceObject on which grant the permission diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index a01aef8498..dfacd0cec3 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); 
return null; @@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 70dea309f2..3e5ab0f38f 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -353,9 +353,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { } /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param ePerson epersons to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 86573940e4..0631e1b55a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -113,6 +113,9 @@ public class ProcessBuilder extends AbstractBuilder { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java index 1548ebcae0..13d037abf8 100644 --- a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java @@ -725,9 +725,6 @@ public class CollectionTest extends AbstractDSpaceObjectTest { // Allow 
Item REMOVE perms doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); - // Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion) - doNothing().when(authorizeServiceSpy) - .authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE)); // create & add item first context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 15e425e23a..bae6ce9e1d 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -1189,8 +1189,6 @@ public class ItemTest extends AbstractDSpaceObjectTest { doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); // Allow Item DELETE perms doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); - // Allow Item WRITE perms (required to first delete identifiers) - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); UUID id = item.getID(); itemService.delete(context, item); diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0d1cc13106..55be531418 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,14 +7,18 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; +import 
java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -24,6 +28,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -39,6 +44,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; @@ -731,6 +738,64 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { } } + /** + * Test designed to check if default sort option for Discovery is working, using workspace + * DiscoveryConfiguration
+ * Note: this test will be skipped if workspace do not have a default sort option set and of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + context.restoreAuthSystemState(); + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + ) + 
.collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration + * {@code + * + * + * open.access + * + * + * } + * + * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context 
context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java new file mode 100644 index 0000000000..aa511bcb92 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; + +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.repository.BitstreamRestRepository; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.ResponseEntity; +import 
org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * REST controller for handling bulk updates to Bitstream resources. + *

+ * This controller is responsible for handling requests to the bitstream category, which allows for updating + * multiple bitstream resources in a single operation. + *

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@RestController +@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME) +public class BitstreamCategoryRestController { + @Autowired + BitstreamRestRepository bitstreamRestRepository; + + /** + * Handles PATCH requests to the bitstream category for bulk updates of bitstream resources. + * + * @param request the HTTP request object. + * @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied. + * @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a + * HTTP 204 No Content response since currently only a delete operation is supported. + * @throws SQLException if an error occurs while accessing the database. + * @throws AuthorizeException if the user is not authorized to perform the requested operation. + */ + @RequestMapping(method = RequestMethod.PATCH) + public ResponseEntity> patch(HttpServletRequest request, + @RequestBody(required = true) JsonNode jsonNode) + throws SQLException, AuthorizeException { + Context context = obtainContext(request); + bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode); + return ResponseEntity.noContent().build(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 79ca381753..665504139c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public class OpenSearchController { if (dsoObject != null) { container = scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso("site", container); + .getDiscoveryConfiguration(context, container); 
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 196cade5dd..70149bbb6b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -12,18 +12,23 @@ import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; +import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; @@ -48,6 +53,9 @@ public class ScriptProcessesController { @Autowired private ScriptRestRepository scriptRestRepository; + @Autowired + private ScriptService scriptService; + @Autowired 
private RequestService requestService; @@ -59,8 +67,8 @@ public class ScriptProcessesController { * @return The ProcessResource object for the created process * @throws Exception If something goes wrong */ - @RequestMapping(method = RequestMethod.POST) - @PreAuthorize("hasAuthority('ADMIN')") + @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") public ResponseEntity> startProcess( @PathVariable(name = "name") String scriptName, @RequestParam(name = "file", required = false) List files) @@ -75,4 +83,21 @@ public class ScriptProcessesController { return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } + @RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public ResponseEntity> startProcessInvalidMimeType( + @PathVariable(name = "name") String scriptName) + throws Exception { + if (log.isTraceEnabled()) { + log.trace("Starting Process for Script with name: " + scriptName); + } + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest()); + ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + + if (scriptToExecute == null) { + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); + } + throw new DSpaceBadRequestException("Invalid mimetype"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 6ee836e5fc..2595968d4d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -7,12 +7,17 @@ */ package 
org.dspace.app.rest.converter; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; + import java.util.ArrayList; import java.util.List; import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.projection.Projection; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; import org.dspace.sort.SortException; import org.dspace.sort.SortOption; import org.springframework.stereotype.Component; @@ -30,18 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter metadataList = new ArrayList(); - if (obj.isMetadataIndex()) { + String id = obj.getName(); + if (obj instanceof DSpaceControlledVocabularyIndex) { + DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; + metadataList = new ArrayList<>(vocObj.getMetadataFields()); + id = vocObj.getVocabulary().getPluginInstanceName(); + bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL); + } else if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_FLAT); } + bir.setId(id); bir.setMetadataList(metadataList); List sortOptionsList = new ArrayList(); @@ -52,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverterExtend {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. 
The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException { + + public static String uuid; + + /** + * @param formatStr string with placeholders, ideally obtained using {@link I18nUtil} + * @return message with bitstream id substituted + */ + private static String formatMessage(String formatStr) { + MessageFormat fmt = new MessageFormat(formatStr); + return fmt.format(new String[]{uuid}); + } + + public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message"; + + public RESTBitstreamNotFoundException(String uuid) { + super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY))); + RESTBitstreamNotFoundException.uuid = uuid; + } + + public String getMessageKey() { + return MESSAGE_KEY; + } + + public String getLocalizedMessage(Context context) { + return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java index ee70dbf431..9e515984fe 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java @@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory { public static final String CATEGORY = RestAddressableModel.DISCOVER; - public static final String ITEMS = "items"; - public static final String ENTRIES = "entries"; + public static final String LINK_ITEMS = "items"; + public static final String LINK_ENTRIES = "entries"; + public static final String LINK_VOCABULARY = "vocabulary"; - boolean metadataBrowse; + // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types, + // etc. 
the second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_VALUE_LIST = "valueList"; + // if the browse index has one level: the full list of items + public static final String BROWSE_TYPE_FLAT = "flatBrowse"; + // if the browse index should display the vocabulary tree. The 1st level shows the tree. + // The second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse"; + // Shared fields + String browseType; @JsonProperty(value = "metadata") List metadataList; + // Single browse index fields + @JsonInclude(JsonInclude.Include.NON_NULL) String dataType; - + @JsonInclude(JsonInclude.Include.NON_NULL) List sortOptions; - + @JsonInclude(JsonInclude.Include.NON_NULL) String order; + // Hierarchical browse fields + @JsonInclude(JsonInclude.Include.NON_NULL) + String facetType; + @JsonInclude(JsonInclude.Include.NON_NULL) + String vocabulary; + @JsonIgnore @Override public String getCategory() { @@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest { return NAME; } - public boolean isMetadataBrowse() { - return metadataBrowse; - } - - public void setMetadataBrowse(boolean metadataBrowse) { - this.metadataBrowse = metadataBrowse; - } - public List getMetadataList() { return metadataList; } @@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest { this.sortOptions = sortOptions; } + /** + * - valueList => if the browse index has two levels, the 1st level shows the list of entries like author names, + * subjects, types, etc. the second level is the actual list of items linked to a specific entry + * - flatBrowse if the browse index has one level: the full list of items + * - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree. 
+ * The second level is the actual list of items linked to a specific entry + */ + public void setBrowseType(String browseType) { + this.browseType = browseType; + } + + public String getBrowseType() { + return browseType; + } + + public void setFacetType(String facetType) { + this.facetType = facetType; + } + + public String getFacetType() { + return facetType; + } + + public void setVocabulary(String vocabulary) { + this.vocabulary = vocabulary; + } + + + public String getVocabulary() { + return vocabulary; + } + @Override public Class getController() { return RestResourceController.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java index 7ec1b22500..b25d827e75 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java @@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest { private List filters = new LinkedList<>(); private List sortOptions = new LinkedList<>(); + private SortOption defaultSortOption; + public String getCategory() { return CATEGORY; } @@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest { return sortOptions; } + public SortOption getDefaultSortOption() { + return defaultSortOption; + } + + public void setDefaultSortOption(SortOption defaultSortOption) { + this.defaultSortOption = defaultSortOption; + } + @Override public boolean equals(Object object) { return (object instanceof SearchConfigurationRest && diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java index f6c821595f..61158704ea 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java @@ -7,9 +7,20 @@ */ package org.dspace.app.rest.model.hateoas; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +import org.atteo.evo.inflector.English; +import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.model.VocabularyRest; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.utils.Utils; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.springframework.hateoas.Link; +import org.springframework.web.util.UriComponentsBuilder; /** * Browse Index Rest HAL Resource. 
The HAL Resource wraps the REST Resource @@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils; */ @RelNameDSpaceResource(BrowseIndexRest.NAME) public class BrowseIndexResource extends DSpaceResource { + + public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { super(bix, utils); // TODO: the following code will force the embedding of items and // entries in the browseIndex we need to find a way to populate the rels // array from the request/projection right now it is always null // super(bix, utils, "items", "entries"); - if (bix.isMetadataBrowse()) { - add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES)); + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) { + ChoiceAuthorityService choiceAuthorityService = + ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary()); + UriComponentsBuilder baseLink = linkTo( + methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null, + null, VocabularyRest.CATEGORY, + English.plural(VocabularyRest.NAME), source.getPluginInstanceName(), + "", null, null)).toUriComponentsBuilder(); + + add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY)); } - add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index ae3cf91d4c..12e27dccac 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -15,9 +15,12 @@ import java.util.List; import java.util.UUID; import javax.servlet.http.HttpServletRequest; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; +import org.dspace.app.rest.converter.JsonPatchConverter; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; @@ -38,6 +41,7 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -72,6 +76,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository operationsLimit) { + throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " + + operationsLimit); + } + resourcePatch.patch(obtainContext(), null, patch.getOperations()); + context.commit(); + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java index 93224f78cd..f608595c3d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java @@ 
-40,7 +40,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ENTRIES) public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (bir.isMetadataBrowse() && "entries".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) && + name.equals(BrowseIndexRest.LINK_ENTRIES)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index 8ffefb619b..6aedcee6c0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -17,7 +18,10 @@ import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; import org.dspace.browse.CrossLinks; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import 
org.springframework.security.access.prepost.PreAuthorize; @@ -31,26 +35,48 @@ import org.springframework.stereotype.Component; @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) public class BrowseIndexRestRepository extends DSpaceRestRepository { + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + @Override @PreAuthorize("permitAll()") public BrowseIndexRest findOne(Context context, String name) { - BrowseIndexRest bi = null; + BrowseIndexRest bi = createFromMatchingBrowseIndex(name); + if (bi == null) { + bi = createFromMatchingVocabulary(name); + } + + return bi; + } + + private BrowseIndexRest createFromMatchingVocabulary(String name) { + DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name); + if (vocabularyIndex != null) { + return converter.toRest(vocabularyIndex, utils.obtainProjection()); + } + return null; + } + + private BrowseIndexRest createFromMatchingBrowseIndex(String name) { BrowseIndex bix; try { - bix = BrowseIndex.getBrowseIndex(name); + bix = BrowseIndex.getBrowseIndex(name); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); } if (bix != null) { - bi = converter.toRest(bix, utils.obtainProjection()); + return converter.toRest(bix, utils.obtainProjection()); } - return bi; + return null; } @Override public Page findAll(Context context, Pageable pageable) { try { - List indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); + List indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices())); + choiceAuthorityService.getChoiceAuthoritiesNames() + .stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null) + .forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name))); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java index 74aa9f38bf..baa79bc80a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java @@ -42,7 +42,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS) public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (!bir.isMetadataBrowse() && "items".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) && + name.equals(BrowseIndexRest.LINK_ITEMS)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index b5aaf3e567..4b9b7d7644 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - 
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Pageable page = PageRequest.of(1, 1); IndexableObject 
scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 157a80e264..5152f11902 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -14,6 +14,7 @@ import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Objects; import javax.servlet.http.HttpServletRequest; @@ -45,10 +46,10 @@ import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - /** * This is the repository responsible to manage MetadataField Rest object * @@ -135,13 +136,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository matchingMetadataFields = new ArrayList<>(); if (StringUtils.isBlank(exactName)) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(context, schemaName, elementName, 
qualifierName, query); + this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable); try { DiscoverResult searchResult = searchService.search(context, null, discoverQuery); for (IndexableObject object : searchResult.getIndexableObjects()) { @@ -149,6 +151,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository filterQueries = new ArrayList<>(); if (StringUtils.isNotBlank(query)) { if (query.split("\\.").length > 3) { @@ -210,6 +214,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Sort.Order order = orderIterator.next(); + discoverQuery.setSortField(order.getProperty() + "_sort", + order.getDirection() == Sort.Direction.ASC ? DiscoverQuery.SORT_ORDER.asc : + DiscoverQuery.SORT_ORDER.desc); + } + discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); + discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; } @@ -247,10 +260,18 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository getFilesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java index f9f665d14f..f5b3edced2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java @@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public BitstreamRest 
getOutputFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java index 33addf7049..2479eeda97 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java @@ -94,6 +94,22 @@ public class ProcessRestRepository extends DSpaceRestRepository findByCurrentUser(Pageable pageable) { + + try { + Context context = obtainContext(); + long total = processService.countByUser(context, context.getCurrentUser()); + List processes = processService.findByUser(context, context.getCurrentUser(), + pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(processes, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Calls on the getBitstreams method to retrieve all the Bitstreams of this process * @param processId The processId of the Process to retrieve the Bitstreams for diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index d974a6d78a..09d65590b6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -37,6 +37,7 @@ import org.dspace.scripts.service.ScriptService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import 
org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -56,29 +57,24 @@ public class ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context); @@ -104,11 +100,17 @@ public class ScriptRestRepository extends DSpaceRestRepository dSpaceCommandLineParameters = processPropertiesToDSpaceCommandLineParameters(properties); ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + if (scriptToExecute == null) { - throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); } - if (!scriptToExecute.isAllowedToExecute(context)) { - throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); + try { + if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) { + throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName + + " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", ")); + } + } catch (IllegalArgumentException e) { + throw new DSpaceBadRequestException("missed handle"); } RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler( context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java new file mode 100644 index 0000000000..b0e2a45c9d --- /dev/null +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation; + +import java.io.IOException; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.app.rest.exception.RESTBitstreamNotFoundException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Bitstream; +import org.dspace.content.service.BitstreamService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.stereotype.Component; + +/** + * A PATCH operation for removing bitstreams in bulk from the repository. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json" + * -d '[ + * {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"} + * ]' + * + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@Component +public class BitstreamRemoveOperation extends PatchOperation { + @Autowired + BitstreamService bitstreamService; + @Autowired + AuthorizeService authorizeService; + public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + + @Override + public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { + String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, ""); + Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete)); + if (bitstreamToDelete == null) { + throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); + } + authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); + + try { + bitstreamService.delete(context, bitstreamToDelete); + } catch (AuthorizeException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && + operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); + } + + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) + throws SQLException { + try { + authorizeService.authorizeAction(context, bitstream, operation); + } catch (AuthorizeException e) { + throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e); + } + } +} diff --git 
a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml new file mode 100644 index 0000000000..4a91ef051e --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -0,0 +1,1118 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + + + + + + + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java index e9b130e703..68511a9d89 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java @@ -14,6 +14,7 @@ import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; @@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private Collection collection; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -126,6 +128,10 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index d6947d7567..8b34edb938 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -7,12 +7,16 @@ */ package org.dspace.app.rest; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE; import static org.dspace.core.Constants.WRITE; 
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -21,9 +25,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.UUID; +import javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -33,6 +39,7 @@ import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; @@ -41,6 +48,7 @@ import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; @@ -52,15 +60,20 @@ import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import 
org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -79,6 +92,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization system in order to create the structure as defined below @@ -2370,6 +2389,513 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest )); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, 
is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item 
publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + 
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams 
to the list of bitstreams to be deleted + // But set the rest.patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = 
IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + 
Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = 
CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + groupService.addMember(context, col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation 
removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate form collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate form collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + 
getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test + public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") 
+ .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 24850cd11b..d1791ab872 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -63,22 +64,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(4))) + .andExpect(jsonPath("$.page.totalElements", is(5))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) - //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(4))) + //The array of browse index should have a size 5 + .andExpect(jsonPath("$._embedded.browses", hasSize(5))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"), - BrowseIndexMatcher.subjectBrowseIndex("asc") + BrowseIndexMatcher.subjectBrowseIndex("asc"), + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc") ))) ; } @@ -125,6 +127,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + 
//When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //Check that the JSON root matches the expected browse index + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint @@ -2142,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -2159,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 11fe58ac1d..72524709ec 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -153,6 +153,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = 
createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -179,6 +182,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt return records; } -} \ No newline at end of file +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java new file mode 100644 index 0000000000..a3408a7736 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -0,0 +1,677 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available 
online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. + * To thoroughly test this, a community and collection structure is set up to where different communities have custom + * configurations configured for them. 
+ * + * The following structure is used: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is + * indexed and provided for the different search scopes. + * + * Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222, + * and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly. 
+ * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml + */ +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community parentCommunity1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community parentCommunity2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") + .build(); + 
collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", 
"subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", 
"collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", "test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + 
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", 
"collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. 
+ */ + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(parentCommunity1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. + */ + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. + */ + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + "collection221field-item221", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index f1a1a095b1..72508a0dad 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -88,7 +88,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") - .build(); + .build(); context.restoreAuthSystemState(); MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); @@ -116,6 +116,41 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio } } + @Test + public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") + .build(); + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = 
metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); + metadataSchemaRest.setPrefix("test.SchemaName"); + metadataSchemaRest.setNamespace(TEST_NAMESPACE); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test,SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedTest() throws Exception { @@ -202,7 +237,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); metadataSchemaRest.setId(metadataSchema.getID()); - metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setPrefix(TEST_NAME); metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) @@ -214,7 +249,33 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", MetadataschemaMatcher - .matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); + .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED))); + } + + @Test + public void update_schemaNameShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema 
= MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE) + .build(); + + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); + metadataSchemaRest.setId(metadataSchema.getID()); + metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadataschemas/" + metadataSchema.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataschemaMatcher + .matchEntry(TEST_NAME, TEST_NAMESPACE))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index 1826cd0fbb..a4a69ca8b1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -9,6 +9,7 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -49,12 +50,12 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest { - private static final String ELEMENT = "test element"; - private static final String QUALIFIER = "test qualifier"; + private static final String ELEMENT = "test_element"; + private static final String QUALIFIER = 
"test_qualifier"; private static final String SCOPE_NOTE = "test scope_note"; - private static final String ELEMENT_UPDATED = "test element updated"; - private static final String QUALIFIER_UPDATED = "test qualifier updated"; + private static final String ELEMENT_UPDATED = "test_element_updated"; + private static final String QUALIFIER_UPDATED = "test_qualifier_updated"; private static final String SCOPE_NOTE_UPDATED = "test scope_note updated"; private MetadataSchema metadataSchema; @@ -564,6 +565,70 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .andExpect(status().isUnprocessableEntity()); } + @Test + public void findByFieldName_sortByFieldNameASC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,ASC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField2), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField1) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + + @Test + public void findByFieldName_sortByFieldNameDESC() throws Exception { + context.turnOffAuthorisationSystem(); 
+ + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,DESC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField1), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField2) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + @Test public void createSuccess() throws Exception { @@ -575,7 +640,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), 
nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,94 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + 
.param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + 
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception { @@ -832,31 +988,81 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) - .build(); + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isOk()); + } + + @Test + public void update_elementShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); context.restoreAuthSystemState(); MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setId(metadataField.getID()); 
metadataFieldRest.setElement(ELEMENT_UPDATED); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); + } + + @Test + public void update_qualifierShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); metadataFieldRest.setQualifier(QUALIFIER_UPDATED); metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) .perform(put("/api/core/metadatafields/" + metadataField.getID()) - .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) - .contentType(contentType)) - .andExpect(status().isOk()); + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( - metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED) - )); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) 
+ )); } @Test - public void update_checkUpdatedInIndex() throws Exception { + public void update_checkNotUpdatedInIndex() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) @@ -885,27 +1091,27 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .perform(put("/api/core/metadatafields/" + metadataField.getID()) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) - .andExpect(status().isOk()); + .andExpect(status().isUnprocessableEntity()); - // new metadata field found in index + // new metadata field not found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", ELEMENT_UPDATED) .param("qualifier", QUALIFIER_UPDATED)) .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( - MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), - ELEMENT_UPDATED, QUALIFIER_UPDATED)) - )) - .andExpect(jsonPath("$.page.totalElements", is(1))); + .andExpect(jsonPath("$.page.totalElements", is(0))); - // original metadata field not found in index + // original metadata field found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", metadataField.getElement()) .param("qualifier", metadataField.getQualifier())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( + MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), + ELEMENT, QUALIFIER)) + )) + .andExpect(jsonPath("$.page.totalElements", is(1))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java 
index 5ac416e606..670d8e2f35 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess; +import static org.dspace.content.ProcessStatus.SCHEDULED; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -220,22 +222,35 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void getProcessFiles() throws Exception { + context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' 
files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -243,25 +258,34 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + 
getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -269,12 +293,18 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -783,27 +813,68 @@ public class ProcessRestRepositoryIT extends 
AbstractControllerIntegrationTest { .andExpect(status().isBadRequest()); } + @Test + public void testFindByCurrentUser() throws Exception { + + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + ProcessBuilder.createProcess(context, admin, "mock-script", parameters) + .withStartAndEndTime("11/01/1990", "19/01/1990") + .build(); + Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("12/01/1990", "18/01/1990") + .build(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/system/processes/search/own")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.processes", contains( + matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED), + matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED)))) + .andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2)))); + + } + @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + 
"/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..3b39d25121 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import 
java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList 
collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main 
strategies for teaching clinical reasoning described in the literature in the context of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." + + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." + + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. 
The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", 
"subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. 
Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 07edfeec33..42c9f2c9f7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static 
org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -44,6 +45,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; @@ -53,6 +55,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; import org.dspace.content.service.BitstreamService; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; @@ -123,12 +126,72 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test - public void findAllScriptsUnauthorizedTest() throws Exception { + public void findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) - .andExpect(status().isForbidden()); + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = 
EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + 
ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); } @Test @@ -222,6 +285,63 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + 
curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -235,15 +355,51 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/system/scripts/mock-script-invalid")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that neither the local admins nor generic users can run scripts reserved to administrators + * (i.e. 
default ones that don't override the default + {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(comAdmin.getEmail(), password); + String colAdmin_token = getAuthToken(colAdmin.getEmail(), password); + String itemAdmin_token = getAuthToken(itemAdmin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test @@ -277,16 +433,6 @@ public class 
ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -322,7 +468,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -434,12 +580,19 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -601,9 +754,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } } - @After public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -611,6 +764,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { throw new RuntimeException(e); } }); + 
context.restoreAuthSystemState(); super.destroy(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index 82d611facf..80f27b6bbb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -31,7 +33,8 @@ public class BrowseIndexMatcher { public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -44,7 +47,8 @@ public class BrowseIndexMatcher { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", 
contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +60,8 @@ public class BrowseIndexMatcher { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +74,8 @@ public class BrowseIndexMatcher { public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -77,4 +83,22 @@ public class BrowseIndexMatcher { hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items")) ); } + + public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + return allOf( + hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", 
equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 82bedf4a92..6483758802 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -110,6 +110,17 @@ public class FacetEntryMatcher { ); } + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index 330d31263b..9bbe430bff 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -60,6 +60,16 @@ public class FacetValueMatcher { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubjectWithAuthority(String label, String authority, int count) { return allOf( diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f9..e21f395f09 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,16 @@ public class RestDiscoverQueryBuilderTest { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10); + restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), null, null); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() 
throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487..ccb7d43a23 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import java.io.InputStream; import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public class TypeConversionTestScriptConfiguration siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + 
itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + 
.convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin + getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com 
admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + 
"/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new 
ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + } finally { + 
ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 0000000000..1ff9570777 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 
1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 0000000000..666fb1e7d5 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
+ + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. 
Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. + + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
+ + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
+ + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 0000000000..949d3b1250 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
+ + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
+ + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
+ + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
+
\ No newline at end of file diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5e..6cffa7ee66 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import java.util.Map; import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceChangeListener; import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ import org.springframework.context.support.ClassPathXmlApplicationContext; */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = 
"classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." + + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 0000000000..cb60a223a1 --- /dev/null +++ b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. 
+ * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 7b66eaf043..3a1d75eb56 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -75,6 +75,9 @@ + + @@ -658,6 +661,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1125,11 +1162,11 @@ - + + select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/> @@ -1207,7 +1244,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 9fbcb776d1..a6cd49bdf1 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -47,6 +47,26 @@ --> + + + diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 6421258c57..657e02b58d 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -25,6 +25,10 @@ rest.projections.full.max = 2 # This property determines the max embed depth for a SpecificLevelProjection rest.projection.specificLevel.maxEmbed = 5 +# This property determines the max amount of rest operations that can be performed at the same 
time, for example when +# batch removing bitstreams. The default value is set to 1000. +rest.patch.operations.limit = 1000 + # Define which configuration properties are exposed through the http:///api/config/properties/ # rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will # respond that the property wasn't found. This property can be defined multiple times to allow access to multiple diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index 5d67c17626..d1e416d2b0 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -30,6 +30,7 @@ + @@ -137,6 +138,14 @@ + + + + + + + + diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 611e77b27b..fb25f11598 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -57,9 +57,6 @@ - - - @@ -725,121 +722,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community - -withdrawn:true AND -discoverable:false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -938,6 +823,8 @@ + + @@ -1015,6 +902,7 @@ + @@ -1282,7 +1170,7 @@ - + @@ -1415,7 +1303,7 @@ - + @@ -1532,7 +1420,7 @@ - + @@ -1652,7 +1540,7 @@ - + @@ -1777,7 +1665,7 @@ - + @@ -1897,7 +1785,7 @@ - + @@ -2016,7 +1904,7 @@ - + @@ -2232,18 +2120,6 @@ - - - - - - - - - - - - diff --git a/dspace/config/spring/oai/oai.xml b/dspace/config/spring/oai/oai.xml index d701c164b6..6b103990de 100644 --- a/dspace/config/spring/oai/oai.xml +++ b/dspace/config/spring/oai/oai.xml @@ -21,4 +21,8 @@ + + + diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index caa646ba1b..df21afbc64 
100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -283,6 +283,7 @@ +