diff --git a/.codecov.yml b/.codecov.yml index a628d33cbe..326dd3e0b2 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage -# needs to be merged across those builds -codecov: - notify: - after_n_builds: 2 - # Settings related to code coverage analysis coverage: status: diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9..91dcfb5dfe 100644 --- a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. 
*/ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a..fb228e7041 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. 
*/ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d..aa76c09c0a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git 
a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java index 4e350562bc..4f2a225d3a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -9,7 +9,6 @@ package org.dspace.app.bulkedit; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c..ff83c3ecb2 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public 
class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public class HarvestScriptConfiguration extends ScriptConfigu this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27..b37df5f5ea 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException 
e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index ac9db76051..b32de11f7a 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -23,6 +23,7 @@ import java.util.UUID; import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean zip = false; protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid = false; protected boolean help = false; protected File workDir = null; protected File workFile = null; @@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - FileUtils.deleteDirectory(workDir); - if (remoteUrl && workFile != null && workFile.exists()) { + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { workFile.delete(); } } @@ -322,14 +332,23 @@ public class 
ItemImport extends DSpaceRunnable { */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); if (!remoteUrl) { // manage zip via upload optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); } else { // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } - if (optionalFileStream.isPresent()) { + + if (validationFileStream.isPresent()) { + // validate zip file + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); @@ -337,10 +356,32 @@ public class ItemImport extends DSpaceRunnable { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. 
The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } + } + /** * Read the mapfile * @param context diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 1a71a8c4c0..98d2469b71 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; import java.io.InputStream; import java.net.URL; import java.sql.SQLException; @@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport { // If this is a zip archive, unzip it first if (zip) { if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip( @@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport { // manage zip via remote url Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); 
+ } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } } } diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index 2d304d8a7d..3f2675ea58 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - 
public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3ca..afe1bb3d75 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter { File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + // Step 1: get an image from our PDF file, with PDF-specific processing options + f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d16243e3e3..408982d157 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,9 +116,17 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { return f2; } - public File getImageFile(File f, int page, boolean verbose) + /** + * Return an image from a bitstream with specific processing options for + * PDFs. 
This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. + */ + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); @@ -155,7 +163,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { op.define("pdf:use-cropbox=true"); } - String s = "[" + page + "]"; + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); @@ -208,20 +216,20 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", - description, nsrc); + System.out.format("%s %s matches pattern and is replaceable.%n", + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. 
Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee..867e684db8 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33deca..8d19597547 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ 
b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,17 +167,24 @@ public class RequestItemEmailNotifier { if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { + // #8636 Anyone receiving the email can respond to the + // request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } } else { Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } email.send(); } else { diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061..067c76cce8 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git 
a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 2120848358..0f144fd69f 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -105,6 +108,13 @@ public class SubmissionConfigReader { */ private SubmissionConfig lastSubmissionConfig = null; + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); + /** * Load Submission Configuration from the * item-submission.xml configuration file @@ -152,6 +162,9 @@ public class SubmissionConfigReader { } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -287,7 +300,7 @@ public class SubmissionConfigReader { * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. 
*/ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -334,18 +347,23 @@ public class SubmissionConfigReader { * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. */ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -355,7 +373,17 @@ public class SubmissionConfigReader { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } @@ -635,4 +663,4 @@ public class SubmissionConfigReader { } return results; } -} +} \ No newline at 
end of file diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 5506b3c23f..28d39d911b 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -11,6 +11,9 @@ import java.io.Serializable; import java.util.Map; import org.apache.commons.lang3.BooleanUtils; +import org.dspace.content.InProgressSubmission; +import org.dspace.content.WorkspaceItem; +import org.hibernate.proxy.HibernateProxyHelper; /** * Class representing configuration for a single step within an Item Submission @@ -173,6 +176,38 @@ public class SubmissionStepConfig implements Serializable { return visibilityOutside; } + /** + * Check if given submission section object is hidden for the current submission scope + * + * @param obj the InProgressSubmission to check + * @return true if the submission section is hidden, false otherwise + */ + public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { + + String scopeToCheck = getScope(obj); + + if (scope == null || scopeToCheck == null) { + return false; + } + + String visibility = getVisibility(); + String visibilityOutside = getVisibilityOutside(); + + if (scope.equalsIgnoreCase(scopeToCheck)) { + return "hidden".equalsIgnoreCase(visibility); + } else { + return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside); + } + + } + + private String getScope(InProgressSubmission obj) { + if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) { + return "submission"; + } + return "workflow"; + } + /** * Get the number of this step in the current Submission process config. 
* Step numbers start with #0 (although step #0 is ALWAYS the special diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index f3c6022e02..afd82db863 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import static org.dspace.eperson.service.EPersonService.MD_PHONE; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -64,6 +66,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -391,7 +394,7 @@ public class LDAPAuthentication protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ -406,9 +409,9 @@ public class LDAPAuthentication final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -547,7 +550,11 @@ public class LDAPAuthentication if (attlist[4] != null) { att = atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -693,48 +700,69 @@ 
public class LDAPAuthentication /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); int i = 1; String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." + i); - boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; String dspaceGroupName = t[1]; - if (group == null) { - cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); - } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." 
+ i - + " does not exist :: " + dspaceGroupName)); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + } else { + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + // assign user to this group + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." + i + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 34543c078a..fc438c234c 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -43,6 +43,7 @@ import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import 
org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; @@ -654,60 +655,6 @@ public class AuthorizeServiceImpl implements AuthorizeService { } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context. - * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = 
createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { @@ -809,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService { return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE); } + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + * @throws java.sql.SQLException passed through. + */ + @Override + public boolean isItemAdmin(Context context) throws SQLException { + return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE); + } + /** * Checks that the context's current user is a community or collection admin in the site. 
* diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 38b6aef45b..c781400bae 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -100,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 36679f94c6..e0a94833d7 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -470,24 +470,6 @@ public interface AuthorizeService { public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. 
- * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -532,6 +514,15 @@ public interface AuthorizeService { */ boolean isCollectionAdmin(Context context) throws SQLException; + /** + * Checks that the context's current user is an item admin in the site by querying the solr database. + * + * @param context context with the current user + * @return true if the current user is an item admin in the site + * false when this is not the case, or an exception occurred + */ + boolean isItemAdmin(Context context) throws SQLException; + /** * Checks that the context's current user is a community or collection admin in the site. * @@ -603,7 +594,7 @@ public interface AuthorizeService { /** * Replace all the policies in the target object with exactly the same policies that exist in the source object - * + * * @param context DSpace Context * @param source source of policies * @param dest destination of inherited policies diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 8d065c21ce..6c38c8dd66 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ import org.dspace.sort.SortOption; * This class holds all the information about a specifically configured * BrowseIndex. 
It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -102,7 +104,7 @@ public final class BrowseIndex { * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { try { number = -1; tableBaseName = baseName; diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index e02367f6eb..f99aab852b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d6451..20c43e4bfc 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -8,6 +8,7 @@ package 
org.dspace.content; import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.REMOVE; import static org.dspace.core.Constants.WRITE; @@ -34,6 +35,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -74,14 +76,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (bundle == null) { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "bundle_id=" + id)); + "bundle_id=" + id)); } return bundle; @@ -106,7 +108,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.ADD); log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id=" - + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -167,28 +169,61 @@ public class BundleServiceImpl extends 
DSpaceObjectServiceImpl implement context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + log.info(defaultBitstreamReadGroups.size()); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. 
+ if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } @Override public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws AuthorizeException, SQLException, IOException { + throws AuthorizeException, SQLException, IOException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.REMOVE); log.info(LogHelper.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! Item owningItem = (Item) getParentObject(context, bundle); @@ -221,9 +256,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! 
@@ -231,7 +266,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -246,7 +281,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -368,16 +403,16 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (bitstream == null) { //This should never occur but just in case log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if (!currentBitstreams.contains(bitstream)) { log.warn(LogHelper.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. 
Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -386,9 +421,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { log.warn(LogHelper.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + "Size of old list and new list do not match. Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -434,7 +469,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -442,10 +477,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -477,7 +512,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -485,10 +520,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement 
if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -497,12 +532,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 092451296e..0c5ac94e1c 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1049,6 +1049,26 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i return (int) resp.getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); 
+ + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + @Override public int countArchivedItem(Collection collection) throws ItemCountException { return ItemCounter.getInstance().getCount(collection); diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index a290cb0d99..f86b6690ad 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.orcid.OrcidHistory; import org.dspace.orcid.OrcidQueue; @@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. 
+ // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. + DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index c3deaacd80..31479e6206 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -60,7 +60,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index f25e2c4646..ec8f8769be 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import 
org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader private SubmissionConfigReader itemSubmissionConfigReader; @@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); return ma.getParentChoice(authorityName, vocabularyId, locale); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for 
(DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 0000000000..bf8194dbd5 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.browse.BrowseIndex; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex extends BrowseIndex { + + protected 
DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2f..123626cd09 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ public class SolrAuthority implements ChoiceAuthority { } private String toQuery(String searchField, String text) { - return searchField + ":(" + text.toLowerCase().replaceAll(":", "\\:") + "*) or " + searchField + ":(" + text - .toLowerCase().replaceAll(":", "\\:") + ")"; + return searchField + ":(" + text.toLowerCase().replaceAll(":", "\\\\:") + "*) or " + searchField + ":(" + text + .toLowerCase().replaceAll(":", "\\\\:") + ")"; } @Override @@ -225,7 +225,7 @@ public class SolrAuthority implements ChoiceAuthority { log.debug("requesting label for key " + key + " using locale " + locale); } SolrQuery queryArgs = new SolrQuery(); - queryArgs.setQuery("id:" + key); + queryArgs.setQuery("id:" + key.replaceAll(":", "\\\\:")); queryArgs.setRows(1); QueryResponse searchResponse = getSearchService().search(queryArgs); SolrDocumentList docs = searchResponse.getResults(); diff --git 
a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java index eb34de29c1..a9fd24e947 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java @@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.Choice; import org.dspace.content.authority.ChoiceAuthority; import org.dspace.content.authority.Choices; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; /** * Broker for ChoiceAuthority plugins, and for other information configured @@ -220,4 +221,7 @@ public interface ChoiceAuthorityService { * @return the parent Choice object if any */ public Choice getParentChoice(String authorityName, String vocabularyId, String locale); + + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab); + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java index 69bac319c6..95ec40c7a5 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java @@ -14,6 +14,7 @@ import java.util.List; import org.dspace.content.ProcessStatus; import org.dspace.core.Context; import org.dspace.core.GenericDAO; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; @@ -97,4 +98,26 @@ public interface ProcessDAO extends GenericDAO { List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + /** + * Returns a list of all Process objects in the database by the given user. 
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 23ce6ce381..d719b5006c 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -24,6 +24,7 @@ import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; @@ -168,6 +169,33 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro return list(context, criteriaQuery, false, Process.class, -1, -1); } + @Override + public List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + 
criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID))); + criteriaQuery.orderBy(orderList); + + return list(context, criteriaQuery, false, Process.class, limit, offset); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index d1ad6dcef2..3a562d4e5b 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -456,5 +456,19 @@ public interface CollectionService public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; + /** + * Returns a list of all collections for a specific entity type. + * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." 
+ * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; int countArchivedItem(Collection collection) throws ItemCountException; } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768..2587e6b025 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,16 +38,37 @@ public class CurationScriptConfiguration extends ScriptConfi } /** - * Only admin can run Curation script via the scripts and processes endpoints. 
- * @param context The relevant DSpace context - * @return True if currentUser is admin, otherwise false + * Only repository admins or admins of the target object can run Curation script via the scripts + * and processes endpoints. + * + * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise + * @return true if the currentUser is allowed to run the script with the specified parameters or + * at least in some case if the parameters are not yet known */ @Override - public boolean isAllowedToExecute(Context context) { + public boolean isAllowedToExecute(Context context, List commandLineParameters) { try { - return authorizeService.isAdmin(context); + if (commandLineParameters == null) { + return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context) + || authorizeService.isItemAdmin(context); + } else if (commandLineParameters.stream() + .map(DSpaceCommandLineParameter::getName) + .noneMatch("-i"::equals)) { + return authorizeService.isAdmin(context); + } else { + String dspaceObjectID = commandLineParameters.stream() + .filter(parameter -> "-i".equals(parameter.getName())) + .map(DSpaceCommandLineParameter::getValue) + .findFirst() + .get(); + HandleService handleService = HandleServiceFactory.getInstance().getHandleService(); + DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID); + return authorizeService.isAdmin(context, dso); + } } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + throw new RuntimeException(e); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java index 8bf3cf2aba..8707b733a6 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java +++ 
b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.discovery; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link IndexClient} script */ public class IndexDiscoveryScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -30,15 +22,6 @@ public class IndexDiscoveryScriptConfiguration extends Sc return dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index f14ca124f4..60bf52836b 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,9 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.utils.DiscoverQueryBuilder; @@ -73,35 +76,80 @@ public class SearchUtils { searchService 
= null; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. + * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. + * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * + * + * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -113,6 +161,18 @@ public class SearchUtils { return configurationService.getDiscoveryConfiguration(configurationName); } + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ + public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context 
context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } + public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); return manager @@ -127,47 +187,55 @@ public class SearchUtils { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * + * @param context the database context * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Context context, Item item) + throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item + * + * @param context * @param witem a workflow item * @return a list 
of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 52e0043ff4..7aece5acf3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,10 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + // Add _keyword and _filter fields which are necessary to support filtering and faceting + // for the file names + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if ((description != null) && 
!description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + // Add _keyword and _filter fields which are necessary to support filtering and + // faceting for the descriptions + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", + description); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", + description); } } } @@ -65,4 +75,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { } } } -} \ No newline at end of file +} diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index c02c83ece6..6cb93e2993 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,23 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,9 +33,18 @@ import org.dspace.services.factory.DSpaceServicesFactory; */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private 
Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. + */ + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); + public Map getMap() { return map; } @@ -41,25 +61,98 @@ public class DiscoveryConfigurationService { this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. 
+ */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } - return getDiscoveryConfiguration(name); } - public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. 
+ */ + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { + // Fall back to default configuration + if (dso == null) { + return getDiscoveryConfiguration(null, true); + } + + // Attempt to retrieve cached configuration by UUID + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); + } + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); + } + + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } + + return configuration; + } + + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { + return getDiscoveryConfiguration(name, true); + } + + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. 
+ * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; result = StringUtils.isBlank(name) ? null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -67,12 +160,23 @@ public class DiscoveryConfigurationService { return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, - final IndexableObject dso) { + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(context, indexableObject); } } @@ -92,13 +196,25 @@ public class DiscoveryConfigurationService { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { 
+ List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName( - DiscoveryConfigurationService.class - .getName(), - DiscoveryConfigurationService.class); + DiscoveryConfigurationService.class + .getName(), + DiscoveryConfigurationService.class); for (String key : mainService.getMap().keySet()) { System.out.println(key); @@ -126,7 +242,7 @@ public class DiscoveryConfigurationService { System.out.println("Recent submissions configuration:"); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration - .getRecentSubmissionConfiguration(); + .getRecentSubmissionConfiguration(); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax()); diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index e251d1bc51..cd1a4eecb8 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -9,6 +9,7 @@ package org.dspace.discovery.configuration; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -22,6 +23,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * 
Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -30,6 +36,14 @@ public class DiscoverySortConfiguration { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index c2bacfe502..817be7848d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + 
(WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 61827747b7..005f9b4247 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -31,6 +31,7 @@ import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; @@ -93,6 +94,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace @@ -415,7 +418,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl implements List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! 
for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index e7c786d5f8..e5a90907c7 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -27,13 +27,14 @@ import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean { /** * log4j category */ @@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public String mint(Context context, DSpaceObject dso) throws IdentifierException { return mint(context, dso, this.filter); @@ -66,7 +80,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { try { 
history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -76,7 +90,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -134,7 +148,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { if (history != null) { // versioning is currently supported for items only // if we have a history, we have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } @@ -145,7 +159,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return doi; + return doi.startsWith(DOI.SCHEME) ? 
doi : DOI.SCHEME + doi; } @Override @@ -153,6 +167,21 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { register(context, dso, identifier, this.filter); } + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + + String doi = mint(context, dso, filter); + + register(context, dso, doi, filter); + + return doi; + } + @Override public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { @@ -162,7 +191,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -170,10 +199,10 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index b29d47f406..4f9efd2206 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -35,6 +35,7 @@ import org.dspace.versioning.Version; import 
org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ import org.springframework.stereotype.Component; * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends IdentifierProvider implements InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 0fac326ca1..78ad6b7b79 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -30,6 +30,7 @@ import org.dspace.versioning.Version; import 
org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +40,8 @@ import org.springframework.stereotype.Component; * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +67,19 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 0000000000..dec0b050f3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available 
online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; +import org.joda.time.LocalDate; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the crossref-integration.xml file. + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + SimpleDateFormat issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = new LocalDate( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + } else if (date.has(0) && date.has(1)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) + .withMonthOfYear(date.get(1).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()); + issuedDateFormat = 
new SimpleDateFormat("yyyy"); + } + values.add(issuedDateFormat.format(issuedDate.toDate())); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index 6536026058..add9caef1b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -15,8 +15,8 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.DCDate; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -107,28 +107,30 @@ public class PubmedDateMetadatumContributor implements MetadataContributor LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + 
dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size()) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); - values.add(metadataFieldMapping.toDCValue(field, formatter.format(date))); - break; + resultDateString = resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. @@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor implements MetadataContributor } j++; } - if (dcDate == null) { + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); + } else { log.info( "Failed parsing " + dateString + ", check " + "the configured dataformats in config/spring/api/pubmed-integration.xml"); diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index a567c6e7a7..07a79384c7 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -80,7 +80,7 @@ public class OrcidHistory implements ReloadableEntity { * A description of the synchronized resource. 
*/ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidHistory implements ReloadableEntity { * the owner itself. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "metadata") private String metadata; @@ -104,7 +104,7 @@ public class OrcidHistory implements ReloadableEntity { * The response message incoming from ORCID. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "response_message") private String responseMessage; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 9261f14eea..65b66cd20c 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -65,7 +65,7 @@ public class OrcidQueue implements ReloadableEntity { * A description of the resource to be synchronized. 
*/ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidQueue implements ReloadableEntity { */ @Lob @Column(name = "metadata") - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") private String metadata; /** diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java index 1a657343c0..88a1033eca 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script configuration for {@link OrcidBulkPush}. 
@@ -24,20 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 190d214a3c..eab3ba460c 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -71,7 +71,7 @@ public class Process implements ReloadableEntity { private ProcessStatus processStatus; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index 33fea75add..2e14aeaa36 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -129,6 +129,11 @@ public class ProcessServiceImpl implements ProcessService { return processes; } + @Override + public List findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException { + return processDAO.findByUser(context, eperson, limit, offset); + } + @Override public void start(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.RUNNING); @@ -311,6 +316,11 @@ public class ProcessServiceImpl implements 
ProcessService { return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); } + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + return processDAO.countByUser(context, user); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a51..abb700cb10 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public class ScriptServiceImpl implements ScriptService { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) .sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f44..642409a924 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -7,17 +7,28 @@ */ package org.dspace.scripts.configuration; +import java.sql.SQLException; +import java.util.List; + import org.apache.commons.cli.Options; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.core.Context; +import org.dspace.scripts.DSpaceCommandLineParameter; import 
org.dspace.scripts.DSpaceRunnable; import org.springframework.beans.factory.BeanNameAware; +import org.springframework.beans.factory.annotation.Autowired; /** * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this - * and represent a script's configuration + * and represent a script's configuration. + * By default scripts are available only to repository administrators; scripts that have a broader audience + * must override the {@link #isAllowedToExecute(Context, List)} method. */ public abstract class ScriptConfiguration implements BeanNameAware { + @Autowired + protected AuthorizeService authorizeService; + /** * The possible options for this script */ @@ -70,14 +81,23 @@ public abstract class ScriptConfiguration implements B * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration */ public abstract void setDspaceRunnableClass(Class dspaceRunnableClass); + /** * This method will return if the script is allowed to execute in the given context. 
This is by default set * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ - public abstract boolean isAllowedToExecute(Context context); + public boolean isAllowedToExecute(Context context, List commandLineParameters) { + try { + return authorizeService.isAdmin(context); + } catch (SQLException e) { + throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); + } + } /** * The getter for the options of the Script diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java index ce6a173b0e..c6fc248881 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java +++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java @@ -255,4 +255,26 @@ public interface ProcessService { */ List findByStatusAndCreationTimeOlderThan(Context context, List statuses, Date date) throws SQLException; + + /** + * Returns a list of all Process objects in the database by the given user. + * + * @param context The relevant DSpace context + * @param user The user to search for + * @param limit The limit for the amount of Processes returned + * @param offset The offset for the Processes to be returned + * @return The list of all Process objects in the Database + * @throws SQLException If something goes wrong + */ + List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException; + + /** + * Count all the processes which is related to the given user. 
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cb..7d1015c8e2 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ import org.springframework.beans.factory.annotation.Autowired; public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration scriptLocations = new ArrayList<>(); @@ -946,26 +938,6 @@ public class DatabaseUtils { // First, run Flyway's clean command on database. 
// For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1214,11 +1186,6 @@ public class DatabaseUtils { // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1322,11 +1289,6 @@ public class DatabaseUtils { // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ -1552,8 +1514,6 @@ public class DatabaseUtils { String dbms_lc = 
prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java deleted file mode 100644 index 95939f9902..0000000000 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.storage.rdbms.hibernate; - -import org.apache.commons.lang.StringUtils; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.hibernate.type.AbstractSingleColumnStandardBasicType; -import org.hibernate.type.descriptor.java.StringTypeDescriptor; -import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; -import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; -import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; - -/** - * A Hibernate @Type used to properly support the CLOB in both Postgres and Oracle. - * PostgreSQL doesn't have a CLOB type, instead it's a TEXT field. - * Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle. - * https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java - * - * This Type checks if we are using PostgreSQL. - * If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType) - * If not, it uses default CLOB (which works for other databases). 
- */ -public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType { - - public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); - - public DatabaseAwareLobType() { - super( getDbDescriptor(), StringTypeDescriptor.INSTANCE ); - } - - public static SqlTypeDescriptor getDbDescriptor() { - if ( isPostgres() ) { - return LongVarcharTypeDescriptor.INSTANCE; - } else { - return ClobTypeDescriptor.DEFAULT; - } - } - - private static boolean isPostgres() { - ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - String dbDialect = configurationService.getProperty("db.dialect"); - - return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL"); - } - - @Override - public String getName() { - return "database_aware_lob"; - } -} - diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 842fc15e16..f0c4e4e179 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,13 +78,6 @@ public class MigrationUtils { constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? 
AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + @@ -160,9 +153,6 @@ public class MigrationUtils { case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -208,9 +198,6 @@ public class MigrationUtils { case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -256,9 +243,6 @@ public class MigrationUtils { case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9..758e745ddc 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ import org.flywaydb.core.api.migration.Context; * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e53..37100a17f9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ import org.flywaydb.core.api.migration.Context; * from 1.5 to 1.6 *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e..8e2be91127 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ import org.flywaydb.core.api.migration.Context; * this column must be renamed to "resource_id". *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc9..0361e68053 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c..4c1cf33653 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration extends BaseJ String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git 
a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java index 41b15ddd7a..894d3491a1 100644 --- a/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/submit/migration/SubmissionFormsMigrationCliScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.submit.migration; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script @@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired; public class SubmissionFormsMigrationCliScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; @Override - public boolean isAllowedToExecute(Context context) { + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + 
return options; + } + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index dbbb7bbc5e..b29af4e773 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -11,6 +11,8 @@ import java.text.ParseException; import java.util.Date; import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; @@ -21,6 +23,7 @@ import org.dspace.core.Context; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** @@ -28,9 +31,8 @@ import org.springframework.beans.factory.annotation.Autowired; * set permission on a file. An option is defined by a name such as "open * access", "embargo", "restricted access", etc. and some optional attributes to * better clarify the constraints and input available to the user. For instance - * an embargo option could allow to set a start date not longer than 3 years, - * etc - * + * an embargo option could allow to set a start date not longer than 3 years. 
+ * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class AccessConditionOption { @@ -44,9 +46,9 @@ public class AccessConditionOption { @Autowired private ResourcePolicyService resourcePolicyService; - DateMathParser dateMathParser = new DateMathParser(); + private static final Logger LOG = LogManager.getLogger(); - /** An unique name identifying the access contion option **/ + /** A unique name identifying the access condition option. **/ private String name; /** @@ -147,6 +149,9 @@ public class AccessConditionOption { * startDate should be null. Otherwise startDate may not be null. * @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false, * endDate should be null. Otherwise endDate may not be null. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + * @throws ParseException passed through (indicates problem with a date). */ public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description, Date startDate, Date endDate) @@ -160,7 +165,7 @@ public class AccessConditionOption { /** * Validate ResourcePolicy and after update it - * + * * @param context DSpace context * @param resourcePolicy ResourcePolicy to update * @throws SQLException If database error @@ -175,17 +180,25 @@ public class AccessConditionOption { } /** - * Validate the policy properties, throws exceptions if any is not valid - * - * @param context DSpace context - * @param name Name of the resource policy - * @param startDate Start date of the resource policy. If {@link #getHasStartDate()} - * returns false, startDate should be null. Otherwise startDate may not be null. - * @param endDate End date of the resource policy. If {@link #getHasEndDate()} - * returns false, endDate should be null. Otherwise endDate may not be null. + * Validate the policy properties, throws exceptions if any is not valid. + * + * @param context DSpace context. 
+ * @param name Name of the resource policy. + * @param startDate Start date of the resource policy. If + * {@link #getHasStartDate()} returns false, startDate + * should be null. Otherwise startDate may not be null. + * @param endDate End date of the resource policy. If + * {@link #getHasEndDate()} returns false, endDate should + * be null. Otherwise endDate may not be null. + * @throws IllegalStateException if a date is required and absent, + * a date is not required and present, or a date exceeds its + * configured maximum. + * @throws ParseException passed through. */ private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) - throws SQLException, AuthorizeException, ParseException { + throws IllegalStateException, ParseException { + LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", + name, startDate, endDate); if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); } @@ -199,29 +212,33 @@ public class AccessConditionOption { throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date."); } + DateMathParser dateMathParser = new DateMathParser(); + Date latestStartDate = null; if (Objects.nonNull(getStartDateLimit())) { - latestStartDate = dateMathParser.parseMath(getStartDateLimit()); + latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit())); } Date latestEndDate = null; if (Objects.nonNull(getEndDateLimit())) { - latestEndDate = dateMathParser.parseMath(getEndDateLimit()); + latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit())); } + LOG.debug(" latestStartDate {}, latestEndDate {}", + latestStartDate, latestEndDate); // throw if startDate after latestStartDate if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) { throw new 
IllegalStateException(String.format( - "The start date of access condition %s should be earlier than %s from now.", - getName(), getStartDateLimit() + "The start date of access condition %s should be earlier than %s from now (%s).", + getName(), getStartDateLimit(), dateMathParser.getNow() )); } // throw if endDate after latestEndDate if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) { throw new IllegalStateException(String.format( - "The end date of access condition %s should be earlier than %s from now.", - getName(), getEndDateLimit() + "The end date of access condition %s should be earlier than %s from now (%s).", + getName(), getEndDateLimit(), dateMathParser.getNow() )); } } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index 52685b563d..dd61fab967 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch 
(SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { diff --git a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java index 7c3e13a28e..9ff252e8ce 100644 --- a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java +++ b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java @@ -26,12 +26,15 @@ import java.util.Map; import java.util.TimeZone; import java.util.regex.Pattern; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** - * This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency: - * https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr - * /util/DateMathParser.java + * This class (Apache license) is copied from Apache Solr, adding some tweaks to + * resolve an unneeded dependency. See + * the original. * + *

* A Simple Utility class for parsing "math" like strings relating to Dates. * *

@@ -78,7 +81,7 @@ import java.util.regex.Pattern; * "setNow" in the interim). The default value of 'now' is * the time at the moment the DateMathParser instance is * constructed, unless overridden by the {@link CommonParams#NOW NOW} - * request param. + * request parameter. *

* *

@@ -88,7 +91,7 @@ import java.util.regex.Pattern; * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default * TimeZone used is UTC unless overridden by the * {@link CommonParams#TZ TZ} - * request param. + * request parameter. *

* *

@@ -102,6 +105,8 @@ import java.util.regex.Pattern; */ public class DateMathParser { + private static final Logger LOG = LogManager.getLogger(); + public static final TimeZone UTC = TimeZone.getTimeZone("UTC"); /** @@ -119,12 +124,12 @@ public class DateMathParser { /** * A mapping from (uppercased) String labels identifying time units, - * to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to + * to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to * set/add/roll that unit of measurement. * *

* A single logical unit of time might be represented by multiple labels - * for convenience (ie: DATE==DAYS, + * for convenience (i.e. DATE==DAYS, * MILLI==MILLIS) *

* @@ -220,6 +225,7 @@ public class DateMathParser { * * @param now an optional fixed date to use as "NOW" * @param val the string to parse + * @return result of applying the parsed expression to "NOW". * @throws Exception */ public static Date parseMath(Date now, String val) throws Exception { @@ -308,6 +314,7 @@ public class DateMathParser { /** * Defines this instance's concept of "now". * + * @param n new value of "now". * @see #getNow */ public void setNow(Date n) { @@ -316,12 +323,12 @@ public class DateMathParser { /** * Returns a clone of this instance's concept of "now" (never null). - * * If setNow was never called (or if null was specified) then this method * first defines 'now' as the value dictated by the SolrRequestInfo if it * exists -- otherwise it uses a new Date instance at the moment getNow() * is first called. * + * @return "now". * @see #setNow * @see SolrRequestInfo#getNOW */ @@ -334,9 +341,12 @@ public class DateMathParser { } /** - * Parses a string of commands relative "now" are returns the resulting Date. + * Parses a date expression relative to "now". * - * @throws ParseException positions in ParseExceptions are token positions, not character positions. + * @param math a date expression such as "+24MONTHS". + * @return the result of applying the expression to the current time. + * @throws ParseException positions in ParseExceptions are token positions, + * not character positions. 
*/ public Date parseMath(String math) throws ParseException { /* check for No-Op */ @@ -344,6 +354,8 @@ public class DateMathParser { return getNow(); } + LOG.debug("parsing {}", math); + ZoneId zoneId = zone.toZoneId(); // localDateTime is a date and time local to the timezone specified LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime(); @@ -394,11 +406,44 @@ public class DateMathParser { } } + LOG.debug("returning {}", localDateTime); return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant()); } private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); + /** + * For manual testing. With one argument, test one-argument parseMath. + * With two (or more) arguments, test two-argument parseMath. + * + * @param argv date math expressions. + * @throws java.lang.Exception passed through. + */ + public static void main(String[] argv) + throws Exception { + DateMathParser parser = new DateMathParser(); + try { + Date parsed; + + if (argv.length <= 0) { + System.err.println("Date math expression(s) expected."); + } + + if (argv.length > 0) { + parsed = parser.parseMath(argv[0]); + System.out.format("Applied %s to implicit current time: %s%n", + argv[0], parsed.toString()); + } + + if (argv.length > 1) { + parsed = DateMathParser.parseMath(new Date(), argv[1]); + System.out.format("Applied %s to explicit current time: %s%n", + argv[1], parsed.toString()); + } + } catch (ParseException ex) { + System.err.format("Oops: %s%n", ex.getMessage()); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java new file mode 100644 index 0000000000..87d354a7f6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at 
+ * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * Various manipulations of dates and times. + * + * @author mwood + */ +public class TimeHelpers { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + /** + * Never instantiate this class. + */ + private TimeHelpers() {} + + /** + * Set a Date's time to midnight UTC. + * + * @param from some date-time. + * @return midnight UTC of the supplied date-time. + */ + public static Date toMidnightUTC(Date from) { + GregorianCalendar calendar = new GregorianCalendar(UTC); + calendar.setTime(from); + calendar.set(GregorianCalendar.HOUR_OF_DAY, 0); + calendar.set(GregorianCalendar.MINUTE, 0); + calendar.set(GregorianCalendar.SECOND, 0); + calendar.set(GregorianCalendar.MILLISECOND, 0); + return calendar.getTime(); + } +} diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 9be443f5ea..efbbeedde0 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -122,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! 
+org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
--- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6a..edebe6e087 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca..87e114ca53 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. + These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). +in `DatabaseUtils`. -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. +The H2 migrations in this directory all use H2's grammar/syntax. +For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html). -## Oracle vs H2 script differences - -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). - -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). But, to you can always `diff` scripts of the same name -for further syntax differences. - -For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html). ## More Information on Flyway The SQL scripts in this directory are H2-specific database migrations. 
They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index fff1fe154f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - 
select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 6cef123859..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. Upgrades) - ---- -WARNING: Oracle Support is deprecated. -See https://github.com/DSpace/DSpace/issues/8214 ---- - -The SQL scripts in this directory are Oracle-specific database migrations. 
They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. 
- -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. -Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. - -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. 
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE SEQUENCE 
subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - last_active 
TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - dc_type_id 
INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- Community2Community 
table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES EPerson(eperson_id) -); - --- Index 
by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - -------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - -------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - -------------------------------------------------------- --- 
RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE Collection2Item.collection_id = Community2Collection.collection_id -; - 
-------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! -CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view -------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT 
Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table -------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDateAccession view 
-------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON 
Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community 
(community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table 
-INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX 
metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. 
- -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX 
sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d1..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. 
-------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX 
bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - -CREATE INDEX bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE 
INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- New Column for Community Admin - Delegated Admin patch (DS-228) ------------------------------------------------------------------- -ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id ); -CREATE INDEX community_admin_fk_idx ON Community(admin); - -------------------------------------------------------------------------- --- DS-236 schema changes for Authority Control of Metadata Values -------------------------------------------------------------------------- -ALTER TABLE MetadataValue - ADD ( authority VARCHAR(100), - confidence INTEGER DEFAULT -1); - --------------------------------------------------------------------------- --- DS-295 CC License being assigned incorrect Mime Type during submission. --------------------------------------------------------------------------- -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License') - WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons'; - -UPDATE bitstream SET bitstream_format_id = - (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML') - WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons'; - -------------------------------------------------------------------------- --- DS-260 Cleanup of Owning collection column for template item created --- with the JSPUI after the collection creation -------------------------------------------------------------------------- -UPDATE item SET owning_collection = null WHERE item_id IN - (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null); - --- Recreate restraints with a know name and deferrable option! 
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql deleted file mode 100644 index f4b2737fb3..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql deleted file mode 100644 index f96cddbe7f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS 
DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc52..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) -- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set 
multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec9..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - --- Special case of migration, we need to the EPerson schema in order to get our metadata for all queries to work --- but we cannot a DB connection until our database is up to date, so we need to create our registries manually in sql - -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual - WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson'); - - --- Insert eperson.firstname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.lastname -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.phone -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual - WHERE NOT EXISTS - (SELECT 
metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert eperson.language -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson')); - --- Insert into dc.provenance -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); - --- Insert into dc.rights.license -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql deleted file mode 100644 index c86cfe3122..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING 
WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complimentary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------- -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 
'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, 
copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT 
-metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and 
element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS 
metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table 
epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS 
resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- --------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - 
LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - 
WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle ADD CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE 
bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT 
EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = (SELECT Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key 
(collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = 
Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD 
bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitsteam -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET 
dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; 
-ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX 
Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET 
eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle 
WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE 
versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue 
SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = 
collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = 
epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE 
doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on eperson(eperson_id); 
-CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd2..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 18cb4a5084..0000000000 
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE 
files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of the --- metadataschemaregistry 
and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. ---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f036..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ /dev/null @@ -1,33 +0,0 @@ --- --- The 
contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. ---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e19..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - 
---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 7478397446..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE 
bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; -DROP SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. 
-CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. --- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = 
item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aa..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the 
LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql deleted file mode 100644 index 30cfae91c8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index fc1c0b2e23..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id raw(16) NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id raw(16) NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f437..0000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index cebae09f65..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD 
leftward_value VARCHAR2(50); -ALTER TABLE relationship ADD rightward_value VARCHAR2(50); - -ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type; -ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql deleted file mode 100644 index 0db294c1c1..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ /dev/null @@ -1,14 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- 
http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING 
WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql deleted file mode 100644 index f71173abe6..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Drop the 'workflowitem' and 'tasklistitem' tables ------------------------------------------------------------------------------------ - -DROP TABLE workflowitem CASCADE CONSTRAINTS; -DROP TABLE tasklistitem CASCADE CONSTRAINTS; - -DROP SEQUENCE workflowitem_seq; -DROP SEQUENCE tasklistitem_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql 
deleted file mode 100644 index 95d07be477..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql +++ /dev/null @@ -1,13 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD tilted INTEGER; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. 
Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT 
metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql deleted file mode 100644 index 5a6abda041..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql +++ /dev/null @@ -1,28 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------- -UPDATE metadatavalue SET dspace_object_id = (SELECT uuid - FROM collection - WHERE template_item_id = dspace_object_id) -WHERE dspace_object_id IN (SELECT template_item_id - FROM Collection) - AND metadata_field_id - IN (SELECT metadata_field_id - FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql deleted file mode 100644 index ae8f1e7ef5..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql +++ /dev/null @@ -1,15 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------------- ----- ALTER table collection -------------------------------------------------------------------------------------- - -ALTER TABLE collection DROP COLUMN workflow_step_1; -ALTER TABLE collection DROP COLUMN workflow_step_2; -ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql deleted file mode 100644 index 9c39c15e66..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Make sure the metadatavalue.place column starts at 0 instead of 1 ----------------------------------------------------- -MERGE INTO metadatavalue mdv -USING ( - SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace - FROM metadatavalue - GROUP BY dspace_object_id, metadata_field_id -) mp -ON ( - mdv.dspace_object_id = mp.dspace_object_id - AND mdv.metadata_field_id = mp.metadata_field_id - AND mp.minplace > 0 -) -WHEN MATCHED THEN UPDATE -SET mdv.place = mdv.place - mp.minplace; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql deleted file mode 100644 index 3fe424cf6c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql +++ /dev/null @@ -1,54 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- 
http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create tables for ORCID Queue and History ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_queue_id_seq; - -CREATE TABLE orcid_queue -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - operation VARCHAR(255), - metadata CLOB, - attempts INTEGER, - CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), - CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); - - -CREATE SEQUENCE orcid_history_id_seq; - -CREATE TABLE orcid_history -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - timestamp_last_attempt TIMESTAMP, - response_message CLOB, - status INTEGER, - metadata CLOB, - operation VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - CONSTRAINT orcid_history_pkey PRIMARY KEY (id), - CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql deleted file mode 100644 index 14bf853143..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file 
are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for ORCID access tokens ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_token_id_seq; - -CREATE TABLE orcid_token -( - id INTEGER NOT NULL, - eperson_id RAW(16) NOT NULL UNIQUE, - profile_item_id RAW(16), - access_token VARCHAR2(100) NOT NULL, - CONSTRAINT orcid_token_pkey PRIMARY KEY (id), - CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), - CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql deleted file mode 100644 index 0e7d417ae5..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store Groups related to a Process on its creation -------------------------------------------------------------------------------- - -CREATE TABLE Process2Group -( - process_id INTEGER REFERENCES Process(process_id), - group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, - CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) -); \ No newline at end of file diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql deleted file mode 100644 index 3eb9ae6dd4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) -ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql deleted file mode 100644 index 3862830230..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql +++ /dev/null @@ -1,45 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ADD table subscription_parameter 
------------------------------------------------------------------------------------ - - -CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ -CREATE TABLE if NOT EXISTS subscription_parameter -( - subscription_parameter_id INTEGER NOT NULL, - name VARCHAR(255), - value VARCHAR(255), - subscription_id INTEGER NOT NULL, - CONSTRAINT subscription_parameter_pkey PRIMARY KEY (subscription_parameter_id), - CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) - REFERENCES subscription (subscription_id) ON DELETE CASCADE -); --- -- - -ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; ----- -- -ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); --- -UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; -ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; ----- -- -ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; --- -- -INSERT INTO subscription_parameter (subscription_parameter_id, name, value, subscription_id) -SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql deleted file mode 100644 index c7bb0b502e..0000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql +++ /dev/null @@ -1,78 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store supervision orders -------------------------------------------------------------------------------- - -CREATE TABLE supervision_orders -( - id INTEGER PRIMARY KEY, - item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, - eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE -); - -CREATE SEQUENCE supervision_orders_seq; - -INSERT INTO supervision_orders (id, item_id, eperson_group_id) -SELECT supervision_orders_seq.nextval AS id, w.item_id, e.uuid -FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w -ON ew.workspace_item_id = w.workspace_item_id -INNER JOIN epersongroup e -ON ew.eperson_group_id = e.uuid; - - --- UPDATE policies for supervision orders --- items, bundles and bitstreams - -DECLARE -BEGIN - -FOR rec IN -( -SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT ib.bundle_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN bundle2bitstream bs ON 
ib.bundle_id = bs.bundle_id -INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL -) - -LOOP - -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_SUBMISSION' -where dspace_object = rec.dspace_object -AND epersongroup_id = rec.eperson_group_id -AND rptype IS NULL; - -END LOOP; -END; - -------------------------------------------------------------------------------- --- drop epersongroup2workspaceitem table -------------------------------------------------------------------------------- - -DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql deleted file mode 100644 index 9d13138fda..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql +++ /dev/null @@ -1,22 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for System wide alerts ------------------------------------------------------------------------------------ - -CREATE SEQUENCE alert_id_seq; - -CREATE TABLE systemwidealert -( - alert_id INTEGER NOT NULL PRIMARY KEY, - message VARCHAR(512), - allow_sessions VARCHAR(64), - countdown_to TIMESTAMP, - active BOOLEAN -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql deleted file mode 100644 index 8aec44a7f6..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Update short description for PNG mimetype in the bitstream format registry --- See: https://github.com/DSpace/DSpace/pull/8722 ------------------------------------------------------------------------------------ - -UPDATE bitstreamformatregistry -SET short_description='PNG' -WHERE short_description='image/png' - AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql deleted file mode 100644 index 509e0a2869..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -ALTER TABLE orcid_history MODIFY (description CLOB); -ALTER TABLE orcid_queue MODIFY (description CLOB); diff --git 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. 
- --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. 
This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). - updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912..e16e4c6d4c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. @@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/ The `update-sequences.sql` script in this directory may still be used to update your internal database counts if you feel they have gotten out of "sync". This may sometimes occur after large restores of content (e.g. 
when using the DSpace -[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore) +[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore) feature). This `update-sequences.sql` script can be executed by running diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c9..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - 
(policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 
'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR 
state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT 
INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = 
owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE 
resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = 
mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON 
i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - 
resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME 
COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson (uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4..0000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - 
resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT 
NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = 
item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 
'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN 
metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - 
JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT 
INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state 
= 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - 
WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836ee..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; 
-ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX 
cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); 
-CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask - -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE cwf_claimtask RENAME COLUMN owner_id to eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - 
-ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc9948..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE 
dspace_object in (SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c69..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT 
NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND 
cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, 
epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, 
resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql deleted file mode 100644 index 70eb419d8f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - 
-INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 
'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! 
--- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, 
action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO 
resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole 
( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE 
INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. 
That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, 
workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT 
cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 6d8ae0c2f0..452460501a 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -23,6 +23,7 @@ + @@ -54,7 +55,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission submit.progressbar.describe.stepone @@ -149,6 +149,34 @@ org.dspace.app.rest.submit.step.ShowIdentifiersStep identifiers + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + workflow + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + submission + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + submission + + @@ -222,6 +250,13 @@ + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 7438fda852..6b7349616e 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -436,6 +436,35 @@ it, please enter the types and the actual numbers or codes. + +
+ + + dc + title + + false + + onebox + Field required + + +
+ +
+ + + dc + type + + false + + onebox + Field required + + +
+ diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index 411e8de4df..08ae3af4ae 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.nio.file.Files; @@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.flywaydb.core.internal.util.ExceptionUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,6 +48,7 @@ import org.junit.Test; public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; private static final String publicationTitle = "A Tale of Two Cities"; private static final String personTitle = "Person Test"; @@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private Collection collection; private Path tempDir; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test @@ -254,6 +262,23 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { checkRelationship(); } + @Test + public void importItemByZipSafInvalidMimetype() throws 
Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + @Test public void resumeImportItemBySafWithMetadataOnly() throws Exception { // create simple SAF diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index ff1083d318..b20515017a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -162,8 +162,8 @@ public abstract class AbstractDSpaceObjectBuilder return (B) this; } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. 
+ * If another ADMIN policy is in place for an eperson it will be replaced * * @param dso * the DSpaceObject on which grant the permission diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index a01aef8498..dfacd0cec3 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); 
return null; @@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 70dea309f2..3e5ab0f38f 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -353,9 +353,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { } /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param ePerson epersons to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 86573940e4..0631e1b55a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -113,6 +113,9 @@ public class ProcessBuilder extends AbstractBuilder { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index 8b7bc2978b..9d786d4761 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -189,6 +189,10 @@ public class WorkspaceItemBuilder extends 
AbstractBuilderworkspace + * DiscoveryConfiguration
+ * Note: this test will be skipped if workspace do not have a default sort option set and of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + context.restoreAuthSystemState(); + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + ) + 
.collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration + */ +public class TimeHelpersTest { + /** + * Test of toMidnightUTC method, of class TimeHelpers. 
+ */ + @Test + public void testToMidnightUTC() { + System.out.println("toMidnightUTC"); + Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant()); + Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant()); + Date result = TimeHelpers.toMidnightUTC(from); + assertEquals(expResult, result); + } +} diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf new file mode 100644 index 0000000000..5b3749cbff Binary files /dev/null and b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf differ diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java index da50f33582..9e6022548d 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -118,7 +118,8 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { } /** - * Constructs a solr search URL. + * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2. 
+ * https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2 * * @param query the search terms * @param manifestId the id of the manifest in which to search @@ -132,8 +133,9 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { solrQuery.set("hl.ocr.fl", "ocr_text"); solrQuery.set("hl.ocr.contextBlock", "line"); solrQuery.set("hl.ocr.contextSize", "2"); - solrQuery.set("hl.snippets", "10"); - solrQuery.set("hl.ocr.trackPages", "off"); + solrQuery.set("hl.snippets", "8192"); + solrQuery.set("hl.ocr.maxPassages", "8192"); + solrQuery.set("hl.ocr.trackPages", "on"); solrQuery.set("hl.ocr.limitBlock","page"); solrQuery.set("hl.ocr.absoluteHighlights", "true"); diff --git a/dspace-oai/pom.xml b/dspace-oai/pom.xml index 27efba73d0..8692482d78 100644 --- a/dspace-oai/pom.xml +++ b/dspace-oai/pom.xml @@ -35,24 +35,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - commons-cli diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java new file mode 100644 index 0000000000..6b3c5ded98 --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -0,0 +1,68 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app.plugins; + +import java.sql.SQLException; +import java.util.List; + +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import 
org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; +import org.dspace.xoai.util.ItemUtils; + +/** + * AccessStatusElementItemCompilePlugin aims to add structured information about the + * Access Status of the item (if any). + + * The xoai document will be enriched with a structure like that + *
+ * {@code
+ *   
+ *       
+ *          open.access
+ *       
+ *   
+ * }
+ * 
+ * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java new file mode 100644 index 0000000000..aa511bcb92 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; + +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.repository.BitstreamRestRepository; +import 
org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * REST controller for handling bulk updates to Bitstream resources. + *

+ * This controller is responsible for handling requests to the bitstream category, which allows for updating + * multiple bitstream resources in a single operation. + *

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@RestController +@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME) +public class BitstreamCategoryRestController { + @Autowired + BitstreamRestRepository bitstreamRestRepository; + + /** + * Handles PATCH requests to the bitstream category for bulk updates of bitstream resources. + * + * @param request the HTTP request object. + * @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied. + * @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a + * HTTP 204 No Content response since currently only a delete operation is supported. + * @throws SQLException if an error occurs while accessing the database. + * @throws AuthorizeException if the user is not authorized to perform the requested operation. + */ + @RequestMapping(method = RequestMethod.PATCH) + public ResponseEntity> patch(HttpServletRequest request, + @RequestBody(required = true) JsonNode jsonNode) + throws SQLException, AuthorizeException { + Context context = obtainContext(request); + bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode); + return ResponseEntity.noContent().build(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java index e297dab44c..a6dbf3496e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java @@ -120,7 +120,7 @@ public class ItemTemplateRestController { * @throws SQLException * @throws AuthorizeException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'WRITE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'WRITE')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> 
patch(HttpServletRequest request, @PathVariable UUID uuid, @RequestBody(required = true) JsonNode jsonNode) @@ -153,7 +153,7 @@ public class ItemTemplateRestController { * @throws AuthorizeException * @throws IOException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'DELETE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'DELETE')") @RequestMapping(method = RequestMethod.DELETE) public ResponseEntity> deleteTemplateItem(HttpServletRequest request, @PathVariable UUID uuid) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 79ca381753..665504139c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public class OpenSearchController { if (dsoObject != null) { container = scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso("site", container); + .getDiscoveryConfiguration(context, container); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 196cade5dd..70149bbb6b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -12,18 +12,23 @@ import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.converter.ConverterService; +import 
org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.model.ProcessRest; import org.dspace.app.rest.model.ScriptRest; import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.PathVariable; @@ -48,6 +53,9 @@ public class ScriptProcessesController { @Autowired private ScriptRestRepository scriptRestRepository; + @Autowired + private ScriptService scriptService; + @Autowired private RequestService requestService; @@ -59,8 +67,8 @@ public class ScriptProcessesController { * @return The ProcessResource object for the created process * @throws Exception If something goes wrong */ - @RequestMapping(method = RequestMethod.POST) - @PreAuthorize("hasAuthority('ADMIN')") + @RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") public ResponseEntity> startProcess( @PathVariable(name = "name") String scriptName, @RequestParam(name = "file", required = false) List files) @@ -75,4 +83,21 @@ public class ScriptProcessesController { return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } + 
@RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public ResponseEntity> startProcessInvalidMimeType( + @PathVariable(name = "name") String scriptName) + throws Exception { + if (log.isTraceEnabled()) { + log.trace("Starting Process for Script with name: " + scriptName); + } + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest()); + ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + + if (scriptToExecute == null) { + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); + } + throw new DSpaceBadRequestException("Invalid mimetype"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java index ce7ca34918..fa1d145011 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java @@ -86,6 +86,10 @@ public abstract class AInprogressItemConverter metadataList = new ArrayList(); - if (obj.isMetadataIndex()) { + String id = obj.getName(); + if (obj instanceof DSpaceControlledVocabularyIndex) { + DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; + metadataList = new ArrayList<>(vocObj.getMetadataFields()); + id = vocObj.getVocabulary().getPluginInstanceName(); + bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL); + } else if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setDataType(obj.getDataType()); + 
bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); + bir.setBrowseType(BROWSE_TYPE_FLAT); } + bir.setId(id); bir.setMetadataList(metadataList); List sortOptionsList = new ArrayList(); @@ -52,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverter { - DateMathParser dateMathParser = new DateMathParser(); - @Override public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, Projection projection) { SubmissionAccessOptionRest model = new SubmissionAccessOptionRest(); model.setId(config.getName()); model.setCanChangeDiscoverable(config.getCanChangeDiscoverable()); model.setProjection(projection); + DateMathParser dateMathParser = new DateMathParser(); for (AccessConditionOption option : config.getOptions()) { AccessConditionOptionRest optionRest = new AccessConditionOptionRest(); optionRest.setHasStartDate(option.getHasStartDate()); optionRest.setHasEndDate(option.getHasEndDate()); if (StringUtils.isNotBlank(option.getStartDateLimit())) { try { - optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit())); + Date requested = dateMathParser.parseMath(option.getStartDateLimit()); + optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong start date limit configuration for the access condition " + "option named " + option.getName()); @@ -49,7 +52,8 @@ public class SubmissionAccessOptionConverter } if (StringUtils.isNotBlank(option.getEndDateLimit())) { try { - optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit())); + Date requested = dateMathParser.parseMath(option.getEndDateLimit()); + optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong end date limit configuration for 
the access condition " + "option named " + option.getName()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index 5e32247ee4..4ad1e47934 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -174,6 +174,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH GroupNameNotProvidedException.class, GroupHasPendingWorkflowTasksException.class, PasswordNotValidException.class, + RESTBitstreamNotFoundException.class }) protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, TranslatableException ex) throws IOException { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java new file mode 100644 index 0000000000..a0b48e3c0d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.exception; + +import java.text.MessageFormat; + +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; + +/** + *

Extend {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException { + + public static String uuid; + + /** + * @param formatStr string with placeholders, ideally obtained using {@link I18nUtil} + * @return message with bitstream id substituted + */ + private static String formatMessage(String formatStr) { + MessageFormat fmt = new MessageFormat(formatStr); + return fmt.format(new String[]{uuid}); + } + + public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message"; + + public RESTBitstreamNotFoundException(String uuid) { + super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY))); + RESTBitstreamNotFoundException.uuid = uuid; + } + + public String getMessageKey() { + return MESSAGE_KEY; + } + + public String getLocalizedMessage(Context context) { + return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java index ee70dbf431..9e515984fe 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java @@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory { public static final String CATEGORY = RestAddressableModel.DISCOVER; - public static final String ITEMS = "items"; - public static final String ENTRIES = "entries"; + public static final String LINK_ITEMS = "items"; + public static final String LINK_ENTRIES = "entries"; + public static final String LINK_VOCABULARY = "vocabulary"; - boolean metadataBrowse; + // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types, + // etc. 
the second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_VALUE_LIST = "valueList"; + // if the browse index has one level: the full list of items + public static final String BROWSE_TYPE_FLAT = "flatBrowse"; + // if the browse index should display the vocabulary tree. The 1st level shows the tree. + // The second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse"; + // Shared fields + String browseType; @JsonProperty(value = "metadata") List metadataList; + // Single browse index fields + @JsonInclude(JsonInclude.Include.NON_NULL) String dataType; - + @JsonInclude(JsonInclude.Include.NON_NULL) List sortOptions; - + @JsonInclude(JsonInclude.Include.NON_NULL) String order; + // Hierarchical browse fields + @JsonInclude(JsonInclude.Include.NON_NULL) + String facetType; + @JsonInclude(JsonInclude.Include.NON_NULL) + String vocabulary; + @JsonIgnore @Override public String getCategory() { @@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest { return NAME; } - public boolean isMetadataBrowse() { - return metadataBrowse; - } - - public void setMetadataBrowse(boolean metadataBrowse) { - this.metadataBrowse = metadataBrowse; - } - public List getMetadataList() { return metadataList; } @@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest { this.sortOptions = sortOptions; } + /** + * - valueList => if the browse index has two levels, the 1st level shows the list of entries like author names, + * subjects, types, etc. the second level is the actual list of items linked to a specific entry + * - flatBrowse if the browse index has one level: the full list of items + * - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree. 
+ * The second level is the actual list of items linked to a specific entry + */ + public void setBrowseType(String browseType) { + this.browseType = browseType; + } + + public String getBrowseType() { + return browseType; + } + + public void setFacetType(String facetType) { + this.facetType = facetType; + } + + public String getFacetType() { + return facetType; + } + + public void setVocabulary(String vocabulary) { + this.vocabulary = vocabulary; + } + + + public String getVocabulary() { + return vocabulary; + } + @Override public Class getController() { return RestResourceController.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java index 7ec1b22500..b25d827e75 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java @@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest { private List filters = new LinkedList<>(); private List sortOptions = new LinkedList<>(); + private SortOption defaultSortOption; + public String getCategory() { return CATEGORY; } @@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest { return sortOptions; } + public SortOption getDefaultSortOption() { + return defaultSortOption; + } + + public void setDefaultSortOption(SortOption defaultSortOption) { + this.defaultSortOption = defaultSortOption; + } + @Override public boolean equals(Object object) { return (object instanceof SearchConfigurationRest && diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index e029dbaf99..46827711f2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,6 +25,7 @@ public class SearchEventRest extends BaseObjectRest { private UUID scope; private String configuration; private String dsoType; + private UUID clickedObject; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -97,4 +98,12 @@ public class SearchEventRest extends BaseObjectRest { public void setDsoType(String dsoType) { this.dsoType = dsoType; } + + public UUID getClickedObject() { + return clickedObject; + } + + public void setClickedObject(UUID clickedObject) { + this.clickedObject = clickedObject; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java index f6c821595f..61158704ea 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java @@ -7,9 +7,20 @@ */ package org.dspace.app.rest.model.hateoas; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +import org.atteo.evo.inflector.English; +import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.model.VocabularyRest; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.utils.Utils; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.springframework.hateoas.Link; +import org.springframework.web.util.UriComponentsBuilder; /** * Browse Index Rest HAL Resource. 
The HAL Resource wraps the REST Resource @@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils; */ @RelNameDSpaceResource(BrowseIndexRest.NAME) public class BrowseIndexResource extends DSpaceResource { + + public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { super(bix, utils); // TODO: the following code will force the embedding of items and // entries in the browseIndex we need to find a way to populate the rels // array from the request/projection right now it is always null // super(bix, utils, "items", "entries"); - if (bix.isMetadataBrowse()) { - add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES)); + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) { + ChoiceAuthorityService choiceAuthorityService = + ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary()); + UriComponentsBuilder baseLink = linkTo( + methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null, + null, VocabularyRest.CATEGORY, + English.plural(VocabularyRest.NAME), source.getPluginInstanceName(), + "", null, null)).toUriComponentsBuilder(); + + add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY)); } - add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index ae3cf91d4c..12e27dccac 100644 --- 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -15,9 +15,12 @@ import java.util.List; import java.util.UUID; import javax.servlet.http.HttpServletRequest; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; +import org.dspace.app.rest.converter.JsonPatchConverter; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; @@ -38,6 +41,7 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -72,6 +76,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository operationsLimit) { + throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " + + operationsLimit); + } + resourcePatch.patch(obtainContext(), null, patch.getOperations()); + context.commit(); + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java index 93224f78cd..f608595c3d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java @@ 
-40,7 +40,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ENTRIES) public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (bir.isMetadataBrowse() && "entries".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) && + name.equals(BrowseIndexRest.LINK_ENTRIES)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index 8ffefb619b..6aedcee6c0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -17,7 +18,10 @@ import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; import org.dspace.browse.CrossLinks; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import 
org.springframework.security.access.prepost.PreAuthorize; @@ -31,26 +35,48 @@ import org.springframework.stereotype.Component; @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) public class BrowseIndexRestRepository extends DSpaceRestRepository { + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + @Override @PreAuthorize("permitAll()") public BrowseIndexRest findOne(Context context, String name) { - BrowseIndexRest bi = null; + BrowseIndexRest bi = createFromMatchingBrowseIndex(name); + if (bi == null) { + bi = createFromMatchingVocabulary(name); + } + + return bi; + } + + private BrowseIndexRest createFromMatchingVocabulary(String name) { + DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name); + if (vocabularyIndex != null) { + return converter.toRest(vocabularyIndex, utils.obtainProjection()); + } + return null; + } + + private BrowseIndexRest createFromMatchingBrowseIndex(String name) { BrowseIndex bix; try { - bix = BrowseIndex.getBrowseIndex(name); + bix = BrowseIndex.getBrowseIndex(name); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); } if (bix != null) { - bi = converter.toRest(bix, utils.obtainProjection()); + return converter.toRest(bix, utils.obtainProjection()); } - return bi; + return null; } @Override public Page findAll(Context context, Pageable pageable) { try { - List indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); + List indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices())); + choiceAuthorityService.getChoiceAuthoritiesNames() + .stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null) + .forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name))); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); diff --git 
a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java index 74aa9f38bf..baa79bc80a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java @@ -42,7 +42,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS) public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (!bir.isMetadataBrowse() && "items".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) && + name.equals(BrowseIndexRest.LINK_ITEMS)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java index c77dcf18dc..3c728d8c31 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -31,6 +32,7 @@ import 
org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -72,6 +74,14 @@ public class CommunityCollectionLinkRepository extends AbstractDSpaceRestReposit discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCol : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java index c211810d11..135d964f3f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -29,6 +30,7 @@ import 
org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -68,6 +70,14 @@ public class CommunitySubcommunityLinkRepository extends AbstractDSpaceRestRepos discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCommunities : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5..a93f5e55dc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -195,7 +195,11 @@ public abstract class DSpaceRestRepository matchingMetadataFields = new ArrayList<>(); if (StringUtils.isBlank(exactName)) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query); + 
this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable); try { DiscoverResult searchResult = searchService.search(context, null, discoverQuery); for (IndexableObject object : searchResult.getIndexableObjects()) { @@ -149,6 +151,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository filterQueries = new ArrayList<>(); if (StringUtils.isNotBlank(query)) { if (query.split("\\.").length > 3) { @@ -210,6 +214,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Sort.Order order = orderIterator.next(); + discoverQuery.setSortField(order.getProperty() + "_sort", + order.getDirection() == Sort.Direction.ASC ? DiscoverQuery.SORT_ORDER.asc : + DiscoverQuery.SORT_ORDER.desc); + } + discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); + discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; } @@ -247,10 +260,18 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository getFilesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java index f9f665d14f..f5b3edced2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java @@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public BitstreamRest 
getOutputFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java index 33addf7049..2479eeda97 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java @@ -94,6 +94,22 @@ public class ProcessRestRepository extends DSpaceRestRepository findByCurrentUser(Pageable pageable) { + + try { + Context context = obtainContext(); + long total = processService.countByUser(context, context.getCurrentUser()); + List processes = processService.findByUser(context, context.getCurrentUser(), + pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(processes, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Calls on the getBitstreams method to retrieve all the Bitstreams of this process * @param processId The processId of the Process to retrieve the Bitstreams for diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index 98f129b4bf..be28170d8a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -137,21 +137,31 @@ public class RegistrationRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context); @@ -104,11 +100,17 @@ public class 
ScriptRestRepository extends DSpaceRestRepository dSpaceCommandLineParameters = processPropertiesToDSpaceCommandLineParameters(properties); ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + if (scriptToExecute == null) { - throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); } - if (!scriptToExecute.isAllowedToExecute(context)) { - throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); + try { + if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) { + throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName + + " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", ")); + } + } catch (IllegalArgumentException e) { + throw new DSpaceBadRequestException("missed handle"); } RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler( context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java index ffefd1daac..24b6b96961 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java @@ -10,6 +10,7 @@ package org.dspace.app.rest.repository; import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; +import java.util.Date; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ -18,11 +19,11 @@ import org.dspace.app.rest.model.AccessConditionOptionRest; import 
org.dspace.app.rest.model.SubmissionUploadRest; import org.dspace.app.rest.projection.Projection; import org.dspace.core.Context; -import org.dspace.eperson.service.GroupService; import org.dspace.submit.model.AccessConditionOption; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -47,11 +48,6 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository uploadConfigs = uploadConfigurationService.getMap().values(); Projection projection = utils.obtainProjection(); List results = new ArrayList<>(); - List configNames = new ArrayList(); + List configNames = new ArrayList<>(); for (UploadConfiguration uploadConfig : uploadConfigs) { if (!configNames.contains(uploadConfig.getName())) { configNames.add(uploadConfig.getName()); @@ -92,13 +88,15 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository + * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json" + * -d '[ + * {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"} + * ]' + * + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@Component +public class BitstreamRemoveOperation extends PatchOperation { + @Autowired + BitstreamService bitstreamService; + @Autowired + AuthorizeService authorizeService; + public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + + @Override + public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { + String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, ""); + Bitstream 
bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete)); + if (bitstreamToDelete == null) { + throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); + } + authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); + + try { + bitstreamService.delete(context, bitstreamToDelete); + } catch (AuthorizeException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && + operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); + } + + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) + throws SQLException { + try { + authorizeService.authorizeAction(context, bitstream, operation); + } catch (AuthorizeException e) { + throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java new file mode 100644 index 0000000000..cb977dff3a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.security; + +import java.io.Serializable; +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.model.TemplateItemRest; +import org.dspace.app.rest.utils.ContextUtil; 
+import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +/** + * {@link RestObjectPermissionEvaluatorPlugin} class that evaluate WRITE and DELETE permission over a TemplateItem + * + * @author Bui Thai Hai (thaihai.bui@dlcorp.com.vn) + */ +@Component +public class TemplateItemRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { + + private static final Logger log = LoggerFactory.getLogger(TemplateItemRestPermissionEvaluatorPlugin.class); + + @Autowired + private RequestService requestService; + + @Autowired + ItemService its; + + @Autowired + private AuthorizeService authorizeService; + + @Override + public boolean hasDSpacePermission(Authentication authentication, Serializable targetId, String targetType, + DSpaceRestPermission permission) { + + DSpaceRestPermission restPermission = DSpaceRestPermission.convert(permission); + if (!DSpaceRestPermission.WRITE.equals(restPermission) && + !DSpaceRestPermission.DELETE.equals(restPermission)) { + return false; + } + if (!StringUtils.equalsIgnoreCase(targetType, TemplateItemRest.NAME)) { + return false; + } + + Request request = requestService.getCurrentRequest(); + Context context = ContextUtil.obtainContext(request.getHttpServletRequest()); + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return false; + } + // Allow collection's admin to edit/delete the template + + UUID dsoId = UUID.fromString(targetId.toString()); + requestService.getCurrentRequest().getHttpServletRequest().getRequestURL(); + try { + 
Collection coll = its.find(context, dsoId).getTemplateItemOf(); + if (authorizeService.isAdmin(context, coll)) { + return true; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return false; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java index f6d5bed198..750c50524d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java @@ -6,9 +6,11 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.submit.factory.impl; + import java.sql.SQLException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Date; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -23,11 +25,12 @@ import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionConfigurationService; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** * Submission "add" operation to add custom resource policies. 
- * + * * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) */ public class AccessConditionAddPatchOperation extends AddPatchOperation { @@ -52,6 +55,18 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation accessConditions = parseAccessConditions(path, value, absolutePath); + // Clamp access condition dates to midnight UTC + for (AccessConditionDTO condition : accessConditions) { + Date date = condition.getStartDate(); + if (null != date) { + condition.setStartDate(TimeHelpers.toMidnightUTC(date)); + } + date = condition.getEndDate(); + if (null != date) { + condition.setEndDate(TimeHelpers.toMidnightUTC(date)); + } + } + verifyAccessConditions(context, configuration, accessConditions); if (absolutePath.length == 1) { @@ -65,7 +80,7 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation parseAccessConditions(String path, Object value, String[] split) { - List accessConditions = new ArrayList(); + List accessConditions = new ArrayList<>(); if (split.length == 1) { accessConditions = evaluateArrayObject((LateObjectEvaluator) value); } else if (split.length == 2) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java index 0216628a6b..d2529cbca3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java @@ -6,6 +6,7 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.submit.factory.impl; + import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -29,6 +30,7 @@ import org.dspace.core.Context; import org.dspace.submit.model.AccessConditionConfiguration; import 
org.dspace.submit.model.AccessConditionConfigurationService; import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.TimeHelpers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -106,7 +108,7 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< return null; } - private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplare) + private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplace) throws ParseException { AccessConditionDTO accessCondition = new AccessConditionDTO(); accessCondition.setName(rpToReplace.getRpName()); @@ -114,13 +116,13 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< accessCondition.setEndDate(rpToReplace.getEndDate()); switch (attributeReplace) { case "name": - accessCondition.setName(valueToReplare); + accessCondition.setName(valueToReplace); return accessCondition; case "startDate": - accessCondition.setStartDate(parseDate(valueToReplare)); + accessCondition.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; case "endDate": - accessCondition.setEndDate(parseDate(valueToReplare)); + accessCondition.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; default: throw new UnprocessableEntityException("The provided attribute: " @@ -128,17 +130,17 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< } } - private void updatePolicy(Context context, String valueToReplare, String attributeReplace, + private void updatePolicy(Context context, String valueToReplace, String attributeReplace, ResourcePolicy rpToReplace) throws SQLException, AuthorizeException { switch (attributeReplace) { case "name": - rpToReplace.setRpName(valueToReplare); + rpToReplace.setRpName(valueToReplace); break; 
case "startDate": - rpToReplace.setStartDate(parseDate(valueToReplare)); + rpToReplace.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; case "endDate": - rpToReplace.setEndDate(parseDate(valueToReplare)); + rpToReplace.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; default: throw new IllegalArgumentException("Attribute to replace is not valid:" + attributeReplace); diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml new file mode 100644 index 0000000000..4a91ef051e --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -0,0 +1,1118 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + 
+ + + + + + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java index e9b130e703..68511a9d89 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/itemimport/ItemImportIT.java @@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; @@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private Collection collection; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -126,6 +128,10 @@ public class ItemImportIT extends AbstractEntityIntegrationTest { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 09dbdca505..7a4aa0c007 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,6 +17,7 @@ import static 
org.dspace.builder.BitstreamFormatBuilder.createBitstreamFormat; import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; +import static org.dspace.core.Constants.DEFAULT_BITSTREAM_READ; import static org.dspace.core.Constants.READ; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.not; @@ -56,6 +57,7 @@ import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -70,6 +72,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.disseminate.CitationDocumentServiceImpl; import org.dspace.eperson.EPerson; @@ -112,6 +115,12 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest @Autowired private BitstreamFormatService bitstreamFormatService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private CollectionService collectionService; + private Bitstream bitstream; private BitstreamFormat supportedFormat; private BitstreamFormat knownFormat; @@ -626,6 +635,54 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest } + @Test + public void testBitstreamDefaultReadInheritanceFromCollection() throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. 
A community-collection structure with one parent community and one collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + // Explicitly create a restrictive default bitstream read policy on the collection + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + authorizeService.removePoliciesActionFilter(context, col1, DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, col1, DEFAULT_BITSTREAM_READ, internalGroup); + + //2. A public item with a new bitstream that is not explicitly restricted + // but should instead inherit + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + // make sure this item has no default policies for a new bundle to inherit + authorizeService.removePoliciesActionFilter(context, publicItem1, DEFAULT_BITSTREAM_READ); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Test Restricted Bitstream") + .withDescription("This bitstream is restricted") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + //** WHEN ** + //We download the bitstream + getClient().perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + //** THEN ** + .andExpect(status().isUnauthorized()); + + //An unauthorized request should not log statistics + checkNumberOfStatsRecords(bitstream, 0); + } + @Test public void restrictedGroupBitstreamForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index d6947d7567..8b34edb938 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -7,12 +7,16 @@ */ package org.dspace.app.rest; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; @@ -21,9 +25,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. 
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.UUID; +import javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -33,6 +39,7 @@ import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; @@ -41,6 +48,7 @@ import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; @@ -52,15 +60,20 @@ import org.dspace.content.Item; import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends 
AbstractControllerIntegrationTest { @@ -79,6 +92,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization system in order to create the structure as defined below @@ -2370,6 +2389,513 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest )); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four 
bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + 
.withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception 
{ + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // But set the rest.patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + 
String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + 
.withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + 
.withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + groupService.addMember(context, 
col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate from collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the
bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate from collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test + public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson
parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), 
password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 24850cd11b..d1791ab872 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -63,22 +64,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default 
Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(4))) + .andExpect(jsonPath("$.page.totalElements", is(5))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) - //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(4))) + //The array of browse index should have a size 5 + .andExpect(jsonPath("$._embedded.browses", hasSize(5))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"), - BrowseIndexMatcher.subjectBrowseIndex("asc") + BrowseIndexMatcher.subjectBrowseIndex("asc"), + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc") ))) ; } @@ -125,6 +127,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + //When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //Check that the JSON root matches the expected browse index + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint @@ -2142,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" 
.andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -2159,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 9a0d39225c..72524709ec 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -146,13 +146,15 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt + " Medical College of Prevention of Iodine Deficiency Diseases"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. 
Senyuk"); MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -172,13 +175,15 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "K. І. 
Kozak"); MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt return records; } -} \ No newline at end of file +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index 8b01e7b377..a115c8aa2f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -6473,7 +6473,7 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest .andExpect(jsonPath("$.facetType", equalTo("authority"))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", - 
containsString("api/discover/facets/supervisedBy?prefix=group%2520B&configuration=supervision"))) + containsString("api/discover/facets/supervisedBy?prefix=group%20B&configuration=supervision"))) //This is how the page object must look like because it's the default with size 20 .andExpect(jsonPath("$.page", is(PageMatcher.pageEntry(0, 20)))) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java new file mode 100644 index 0000000000..a3408a7736 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -0,0 +1,677 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import 
org.springframework.beans.factory.annotation.Autowired; + +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. + * To thoroughly test this, a community and collection structure is set up to where different communities have custom + * configurations configured for them. + * + * The following structure is used: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is + * indexed and provided for the different search scopes. + * + * Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222, + * and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly.
+ * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml + */ +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community parentCommunity1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community parentCommunity2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") + .build(); + 
collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", 
"subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", 
"collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", "test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + 
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", 
"collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. 
+ */ + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(parentCommunity1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. + */ + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. 
+ */ + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. + */ + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + "collection221field-item221", 1) + ) + )); + } + + @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. 
+ */ + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java index 55e82831f3..1fd9e81ca8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java @@ -33,6 +33,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.core.Constants; import org.hamcrest.Matchers; @@ -243,6 +244,35 @@ public class ItemTemplateRestControllerIT extends AbstractControllerIntegrationT ))))); } + /* Similar to patchTemplateItem(), except it is for collection admin, not repository admin + Test case was simplified, since it does not do anything else. 
+ */ + @Test + public void patchTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(patch(getTemplateItemUrlTemplate(itemId)) + .content(patchBody) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + + getClient(collAdminToken).perform(get(getCollectionTemplateItemUrlTemplate(childCollection.getID().toString()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + } + @Test public void patchIllegalInArchiveTemplateItem() throws Exception { setupTestTemplate(); @@ -337,6 +367,22 @@ public class ItemTemplateRestControllerIT extends AbstractControllerIntegrationT .andExpect(status().isNoContent()); } + /*Similar to deleteTemplateItem(), except it is for collection admin, not repository admin + */ + @Test + public void deleteTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(delete(getTemplateItemUrlTemplate(itemId))) + .andExpect(status().isNoContent()); + } + @Test public void deleteTemplateItemNoRights() throws Exception { setupTestTemplate(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java index 
f1a1a095b1..72508a0dad 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadataSchemaRestRepositoryIT.java @@ -88,7 +88,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio context.turnOffAuthorisationSystem(); MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") - .build(); + .build(); context.restoreAuthSystemState(); MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); @@ -116,6 +116,41 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio } } + @Test + public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace") + .build(); + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT); + metadataSchemaRest.setPrefix("test.SchemaName"); + metadataSchemaRest.setNamespace(TEST_NAMESPACE); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test,SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataSchemaRest.setPrefix("test SchemaName"); + getClient(authToken) + .perform(post("/api/core/metadataschemas") + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + 
.contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedTest() throws Exception { @@ -202,7 +237,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); metadataSchemaRest.setId(metadataSchema.getID()); - metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setPrefix(TEST_NAME); metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) @@ -214,7 +249,33 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) .andExpect(status().isOk()) .andExpect(jsonPath("$", MetadataschemaMatcher - .matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED))); + .matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED))); + } + + @Test + public void update_schemaNameShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE) + .build(); + + context.restoreAuthSystemState(); + + MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest(); + metadataSchemaRest.setId(metadataSchema.getID()); + metadataSchemaRest.setPrefix(TEST_NAME_UPDATED); + metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadataschemas/" + metadataSchema.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataschemaMatcher + .matchEntry(TEST_NAME, TEST_NAMESPACE))); } @Test diff --git 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java index 1826cd0fbb..a4a69ca8b1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/MetadatafieldRestRepositoryIT.java @@ -9,6 +9,7 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -49,12 +50,12 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest { - private static final String ELEMENT = "test element"; - private static final String QUALIFIER = "test qualifier"; + private static final String ELEMENT = "test_element"; + private static final String QUALIFIER = "test_qualifier"; private static final String SCOPE_NOTE = "test scope_note"; - private static final String ELEMENT_UPDATED = "test element updated"; - private static final String QUALIFIER_UPDATED = "test qualifier updated"; + private static final String ELEMENT_UPDATED = "test_element_updated"; + private static final String QUALIFIER_UPDATED = "test_qualifier_updated"; private static final String SCOPE_NOTE_UPDATED = "test scope_note updated"; private MetadataSchema metadataSchema; @@ -564,6 +565,70 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .andExpect(status().isUnprocessableEntity()); } + @Test + public void findByFieldName_sortByFieldNameASC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + 
"http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,ASC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + MetadataFieldMatcher.matchMetadataField(metadataField2), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField1) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + + @Test + public void findByFieldName_sortByFieldNameDESC() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema", + "http://www.dspace.org/ns/aschema").build(); + + MetadataField metadataField1 = MetadataFieldBuilder + .createMetadataField(context, schema, "2", null, "AScopeNote").build(); + + MetadataField metadataField2 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", null, "AScopeNote").build(); + + MetadataField metadataField3 = MetadataFieldBuilder + .createMetadataField(context, schema, "1", "a", "AScopeNote").build(); + + context.restoreAuthSystemState(); + + getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) + .param("query", schema.getName()) + .param("sort", "fieldName,DESC")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.metadatafields", contains( + 
MetadataFieldMatcher.matchMetadataField(metadataField1), + MetadataFieldMatcher.matchMetadataField(metadataField3), + MetadataFieldMatcher.matchMetadataField(metadataField2) + ))) + .andExpect(jsonPath("$.page.size", is(20))) + .andExpect(jsonPath("$.page.totalElements", is(3))); + } + @Test public void createSuccess() throws Exception { @@ -575,7 +640,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,94 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration 
.andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setElement("testElement ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void 
createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier,ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + metadataFieldRest.setQualifier("testQualifier ForCreate"); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception { @@ -832,31 +988,81 @@ public class 
MetadatafieldRestRepositoryIT extends AbstractControllerIntegration context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) - .build(); + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isOk()); + } + + @Test + public void update_elementShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); context.restoreAuthSystemState(); MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); metadataFieldRest.setId(metadataField.getID()); metadataFieldRest.setElement(ELEMENT_UPDATED); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(put("/api/core/metadatafields/" + metadataField.getID()) + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + + getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); + } + + @Test + public void update_qualifierShouldThrowError() throws Exception { + context.turnOffAuthorisationSystem(); + + MetadataField metadataField = 
MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) + .build(); + + context.restoreAuthSystemState(); + + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setId(metadataField.getID()); + metadataFieldRest.setElement(ELEMENT); metadataFieldRest.setQualifier(QUALIFIER_UPDATED); metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED); getClient(getAuthToken(admin.getEmail(), password)) .perform(put("/api/core/metadatafields/" + metadataField.getID()) - .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) - .contentType(contentType)) - .andExpect(status().isOk()); + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); getClient().perform(get("/api/core/metadatafields/" + metadataField.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( - metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED) - )); + .andExpect(status().isOk()) + .andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys( + metadataSchema.getName(), ELEMENT, QUALIFIER) + )); } @Test - public void update_checkUpdatedInIndex() throws Exception { + public void update_checkNotUpdatedInIndex() throws Exception { context.turnOffAuthorisationSystem(); MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE) @@ -885,27 +1091,27 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .perform(put("/api/core/metadatafields/" + metadataField.getID()) .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) .contentType(contentType)) - .andExpect(status().isOk()); + .andExpect(status().isUnprocessableEntity()); - // new metadata field found in index + // new metadata field not found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) 
.param("element", ELEMENT_UPDATED) .param("qualifier", QUALIFIER_UPDATED)) .andExpect(status().isOk()) - .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( - MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), - ELEMENT_UPDATED, QUALIFIER_UPDATED)) - )) - .andExpect(jsonPath("$.page.totalElements", is(1))); + .andExpect(jsonPath("$.page.totalElements", is(0))); - // original metadata field not found in index + // original metadata field found in index getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT) .param("schema", metadataSchema.getName()) .param("element", metadataField.getElement()) .param("qualifier", metadataField.getQualifier())) .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem( + MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(), + ELEMENT, QUALIFIER)) + )) + .andExpect(jsonPath("$.page.totalElements", is(1))); } @Test diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 5ac416e606..670d8e2f35 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess; +import static org.dspace.content.ProcessStatus.SCHEDULED; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -220,22 +222,35 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void getProcessFiles() throws Exception { + context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", 
new LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -243,25 +258,34 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new 
LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, 
"inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -269,12 +293,18 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -783,27 +813,68 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(status().isBadRequest()); } + @Test + public void testFindByCurrentUser() throws Exception { + + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + ProcessBuilder.createProcess(context, admin, "mock-script", parameters) + .withStartAndEndTime("11/01/1990", "19/01/1990") + .build(); + Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("12/01/1990", "18/01/1990") + .build(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/system/processes/search/own")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.processes", contains( + 
matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED), + matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED)))) + .andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2)))); + + } + @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + 
is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..3b39d25121 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public 
class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + 
liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main strategies for teaching clinical reasoning described in the literature in the context of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." 
+ + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." + + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. 
However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + 
metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. 
Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java index 3eaf8373cb..3ba84f344b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RegistrationRestRepositoryIT.java @@ -226,7 +226,7 @@ public class RegistrationRestRepositoryIT extends AbstractControllerIntegrationT } @Test - public void testRegisterDomainNotRegisteredMailAddressRegistred() throws Exception { + public void testRegisterMailAddressRegistered() throws Exception { List registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); try { context.turnOffAuthorisationSystem(); @@ -236,7 +236,7 @@ public class RegistrationRestRepositoryIT extends AbstractControllerIntegrationT .withCanLogin(true) .build(); context.restoreAuthSystemState(); - configurationService.setProperty("authentication-password.domain.valid", "test.com"); + RegistrationRest registrationRest = new RegistrationRest(); 
registrationRest.setEmail(email); @@ -245,9 +245,10 @@ public class RegistrationRestRepositoryIT extends AbstractControllerIntegrationT .param(TYPE_QUERY_PARAM, TYPE_REGISTER) .content(mapper.writeValueAsBytes(registrationRest)) .contentType(contentType)) - .andExpect(status().isUnprocessableEntity()); + .andExpect(status().isCreated()); registrationDataList = registrationDataDAO.findAll(context, RegistrationData.class); - assertEquals(0, registrationDataList.size()); + assertEquals(1, registrationDataList.size()); + assertTrue(StringUtils.equalsIgnoreCase(registrationDataList.get(0).getEmail(), email)); } finally { Iterator iterator = registrationDataList.iterator(); while (iterator.hasNext()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 4a90efb2c1..2fb7dbbc96 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -8,14 +8,12 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; -import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.exparity.hamcrest.date.DateMatchers.within; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.text.IsEmptyString.emptyOrNullString; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -34,6 +32,7 @@ import java.io.InputStream; import 
java.sql.SQLException; import java.time.temporal.ChronoUnit; import java.util.Date; +import java.util.Iterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -221,33 +220,34 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. 
- RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(eperson.getEmail())) { + // Verify request data + assertEquals(eperson.getFullName(), requestItem.getReqName()); + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(true, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } - } +} /** * Test of createAndReturn method, with an UNauthenticated user. @@ -273,30 +273,32 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), 
"token"))); + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(RequestItemBuilder.REQ_EMAIL)) { + // Verify request data + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(RequestItemBuilder.REQ_NAME, requestItem.getReqName()); + assertEquals(bitstream.getID(), requestItem.getBitstream().getID()); + assertEquals(false, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 07edfeec33..42c9f2c9f7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -44,6 +45,7 
@@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; @@ -53,6 +55,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; import org.dspace.content.service.BitstreamService; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; @@ -123,12 +126,72 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test - public void findAllScriptsUnauthorizedTest() throws Exception { + public void findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) - .andExpect(status().isForbidden()); + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isUnauthorized()); + } + + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = 
CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", 
greaterThanOrEqualTo(1))); } @Test @@ -222,6 +285,63 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + 
curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -235,15 +355,51 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/system/scripts/mock-script-invalid")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins, nor generic users can run scripts reserved to administrator + * (i.e. 
default one that don't override the default + * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(eperson.getEmail(), password); + String colAdmin_token = getAuthToken(eperson.getEmail(), password); + String itemAdmin_token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test @@ -277,16 +433,6 @@ public class 
ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -322,7 +468,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -434,12 +580,19 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -601,9 +754,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } } - @After public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -611,6 +764,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { throw new RuntimeException(e); } }); + 
context.restoreAuthSystemState(); super.destroy(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index bd40cfdc9d..978d8feb58 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -411,4 +411,114 @@ public class SearchEventRestRepositoryIT extends AbstractControllerIntegrationTe .andExpect(status().isCreated()); } + + @Test + public void postTestWithClickedObjectSuccess() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. 
Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(publicItem1.getID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + } + + @Test + public void postTestWithClickedObjectNotExisting() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(UUID.randomUUID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isBadRequest()); + + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index 
e7d43ec4d6..babb1fac23 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -257,10 +257,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -268,7 +268,7 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra .param("page", "1")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("test-hidden"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=0"), Matchers.containsString("size=1")))) @@ -285,8 +285,8 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page="), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", 
is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(1))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -294,30 +294,56 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra .param("page", "2")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess"))) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=0"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=1"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=2"), Matchers.containsString("size=1")))) 
.andExpect(jsonPath("$._links.last.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) .andExpect(jsonPath("$.page.number", is(2))); + getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") + .param("size", "1") + .param("page", "3")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=0"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.next.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$.page.size", is(1))) + 
.andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", is(7))) + .andExpect(jsonPath("$.page.number", is(3))); + getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") .param("size", "1") - .param("page", "3")) + .param("page", "4")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("qualdroptest"))) @@ -326,24 +352,24 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=2"), Matchers.containsString("size=1")))) + Matchers.containsString("page=3"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.next.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) - .andExpect(jsonPath("$.page.number", is(3))); + .andExpect(jsonPath("$.page.totalElements", is(7))) + .andExpect(jsonPath("$.page.totalPages", 
is(7))) + .andExpect(jsonPath("$.page.number", is(4))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") .param("size", "1") - .param("page", "4")) + .param("page", "5")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("extractiontestprocess"))) @@ -352,20 +378,20 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=0"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=3"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next.href", Matchers.allOf( - Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + .andExpect(jsonPath("$._links.next.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=5"), Matchers.containsString("size=1")))) + Matchers.containsString("page=6"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(6))) - .andExpect(jsonPath("$.page.totalPages", is(6))) - .andExpect(jsonPath("$.page.number", is(4))); + .andExpect(jsonPath("$.page.totalElements", is(7))) + 
.andExpect(jsonPath("$.page.totalPages", is(7))) + .andExpect(jsonPath("$.page.number", is(5))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 241bdefe21..cf1e0c7c76 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -67,13 +67,13 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(content().contentType(contentType)) //The configuration file for the test env includes 6 forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) //The array of submissionforms should have a size of 8 - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))) + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10)))) ; } @@ -84,12 +84,12 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))); + .andExpect(jsonPath("$._embedded.submissionforms", 
hasSize(equalTo(10)))); } @Test @@ -696,10 +696,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("page=1"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -707,8 +707,8 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .param("page", "1")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("languagetest"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-workflow-hidden"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("languagetest"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -723,10 +723,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("page=2"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), 
Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(1))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -734,8 +734,8 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .param("page", "2")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) - .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-submission-hidden"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -747,10 +747,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("page=2"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(2))); getClient(tokenAdmin).perform(get("/api/config/submissionforms") @@ -758,7 +758,8 @@ 
public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .param("page", "3")) .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo"))) + .andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority"))) .andExpect(jsonPath("$._links.first.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=0"), Matchers.containsString("size=2")))) @@ -770,10 +771,33 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("page=3"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissionforms?"), - Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(8))) - .andExpect(jsonPath("$.page.totalPages", equalTo(4))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) .andExpect(jsonPath("$.page.number", is(3))); + + getClient(tokenAdmin).perform(get("/api/config/submissionforms") + .param("size", "2") + .param("page", "4")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone"))) + .andExpect(jsonPath("$._links.first.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=0"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + 
Matchers.containsString("page=3"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.self.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$._links.last.href", Matchers.allOf( + Matchers.containsString("/api/config/submissionforms?"), + Matchers.containsString("page=4"), Matchers.containsString("size=2")))) + .andExpect(jsonPath("$.page.size", is(2))) + .andExpect(jsonPath("$.page.totalElements", equalTo(10))) + .andExpect(jsonPath("$.page.totalPages", equalTo(5))) + .andExpect(jsonPath("$.page.number", is(4))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java index c43821d4a0..72612fc5eb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkflowItemRestRepositoryIT.java @@ -2122,4 +2122,35 @@ public class WorkflowItemRestRepositoryIT extends AbstractControllerIntegrationT WorkflowItemBuilder.deleteWorkflowItem(idRef.get()); } } + + @Test + public void testWorkflowWithHiddenSections() throws Exception { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden") + .withName("Collection 1") + .withWorkflowGroup(1, eperson) + .build(); + + XmlWorkflowItem workflowItem = WorkflowItemBuilder.createWorkflowItem(context, collection) + .withTitle("Workflow Item") + .build(); + + context.restoreAuthSystemState(); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/workflow/workflowitems/" + workflowItem.getID())) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$.sections.test-outside-workflow-hidden").exists()) + .andExpect(jsonPath("$.sections.test-outside-submission-hidden").doesNotExist()) + .andExpect(jsonPath("$.sections.test-never-hidden").exists()) + .andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist()); + + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index 6c97526425..8b2f3f093a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -8566,4 +8566,41 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration ))); } + @Test + public void testSubmissionWithHiddenSections() throws Exception { + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden") + .withName("Collection 1") + .build(); + + WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .withIssueDate("2023-01-01") + .withType("book") + .build(); + + context.restoreAuthSystemState(); + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken) + .perform(get("/api/submission/workspaceitems/" + workspaceItem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.sections.test-outside-workflow-hidden").doesNotExist()) + .andExpect(jsonPath("$.sections.test-outside-submission-hidden").exists()) + .andExpect(jsonPath("$.sections.test-never-hidden").exists()) + .andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist()); + + // Deposit the item + 
getClient(adminToken).perform(post("/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + workspaceItem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index 82d611facf..80f27b6bbb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -31,7 +33,8 @@ public class BrowseIndexMatcher { public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), 
@@ -44,7 +47,8 @@ public class BrowseIndexMatcher { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +60,8 @@ public class BrowseIndexMatcher { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +74,8 @@ public class BrowseIndexMatcher { public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -77,4 +83,22 @@ public class BrowseIndexMatcher { hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items")) ); } + + public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + return allOf( + 
hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 82bedf4a92..6483758802 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -110,6 +110,17 @@ public class FacetEntryMatcher { ); } + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index 330d31263b..9bbe430bff 100644 --- 
a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -60,6 +60,16 @@ public class FacetValueMatcher { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubjectWithAuthority(String label, String authority, int count) { return allOf( diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java new file mode 100644 index 0000000000..24a94a4d4b --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CollectionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; 
+import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunityCollectionLinkRepository} + */ +public class CommunityCollectionLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Collection collection1; + Collection collection2; + Collection collection3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + collection3 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getCollections_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection1), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection3) + ))); + } + + @Test + public void getCollections_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection3), + CollectionMatcher.matchCollection(collection2), + 
CollectionMatcher.matchCollection(collection1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java new file mode 100644 index 0000000000..aa3b1c0721 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CommunityMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunitySubcommunityLinkRepository} + */ +public class CommunitySubcommunityLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Community subCommunity1; + Community subCommunity2; + Community subCommunity3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + subCommunity1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 1") + .build(); + subCommunity2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + 
.withName("Sub community 2") + .build(); + subCommunity3 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getSubCommunities_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity1), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity3) + ))); + } + + @Test + public void getSubCommunities_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity3), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity1) + ))); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f9..e21f395f09 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + 
discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,16 @@ public class RestDiscoverQueryBuilderTest { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10); + restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), null, null); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487..ccb7d43a23 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import java.io.InputStream; import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public class TypeConversionTestScriptConfiguration siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new 
DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() 
+ .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin + getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + 
String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") 
+ .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate 
over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" 
+ curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + } finally { + ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private 
AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 0000000000..1ff9570777 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 0000000000..666fb1e7d5 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
+ + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. 
Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. + + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
+ + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
+ + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 0000000000..949d3b1250 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
+ + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
+ + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
+ + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
+
\ No newline at end of file diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5e..6cffa7ee66 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import java.util.Map; import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceChangeListener; import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ import org.springframework.context.support.ClassPathXmlApplicationContext; */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = 
"classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." + + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 0000000000..cb60a223a1 --- /dev/null +++ b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. 
+ * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} diff --git a/dspace-sword/pom.xml b/dspace-sword/pom.xml index f5ef7e01d8..fd68337bbc 100644 --- a/dspace-sword/pom.xml +++ b/dspace-sword/pom.xml @@ -24,25 +24,6 @@ ${basedir}/.. - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - - diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 35206d6ee2..9badeb2fe8 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -22,38 +22,6 @@ ${basedir}/.. 
- - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - postgres-support - - - !db.name - - - - - org.postgresql - postgresql - - - - - javax.servlet diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 7b66eaf043..3a1d75eb56 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -75,6 +75,9 @@ + + @@ -658,6 +661,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1125,11 +1162,11 @@ - + + select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/> @@ -1207,7 +1244,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 2ab26dcf57..a6cd49bdf1 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -47,6 +47,26 @@ --> + + + @@ -79,7 +99,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission - - - @@ -725,121 +722,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community - -withdrawn:true AND -discoverable:false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -938,6 +823,8 @@ + + @@ -1015,6 +902,7 @@ + @@ -1282,7 +1170,7 @@ - + @@ -1415,7 +1303,7 @@ - + @@ -1532,7 +1420,7 @@ - + @@ -1652,7 +1540,7 @@ - + @@ -1777,7 +1665,7 @@ - + @@ -1897,7 +1785,7 @@ - + @@ -2016,7 +1904,7 @@ - + @@ -2232,18 +2120,6 @@ - - - - - - - - - - - - diff --git a/dspace/config/spring/oai/oai.xml b/dspace/config/spring/oai/oai.xml index d701c164b6..6b103990de 100644 --- 
a/dspace/config/spring/oai/oai.xml +++ b/dspace/config/spring/oai/oai.xml @@ -21,4 +21,8 @@ + + + diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 8aee946d84..39a4778356 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -910,9 +910,9 @@ - dc - dcterms - references + dcterms + references + false onebox diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 8f5a6f84f4..dd98bf0cbd 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -61,22 +61,6 @@ - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - unit-test-environment diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index a7c9b5922c..115393b7db 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -90,24 +90,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 9b696fa0cb..41ddb94be5 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -244,22 +244,6 @@ just adding new jar in the classloader - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index caa646ba1b..df21afbc64 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -283,6 +283,7 @@ + diff --git a/pom.xml b/pom.xml index 3b6f7edb65..deea90890c 100644 --- a/pom.xml +++ b/pom.xml @@ -19,13 +19,13 @@ 11 - 5.3.20 - 2.6.8 - 5.6.5 - 5.6.5.Final - 6.0.23.Final - 42.4.3 - 8.11.1 + 5.3.27 + 2.7.12 + 5.7.8 + 5.6.15.Final + 6.2.5.Final + 42.6.0 + 8.11.2 3.4.0 2.10.0 @@ -38,11 +38,11 @@ 1.1.0 9.4.51.v20230217 - 2.17.1 - 2.0.27 - 1.18.0 - 1.7.25 - 2.3.0 + 2.20.0 + 2.0.28 + 1.19.0 + 1.7.36 + 2.5.0 1.70 @@ -1617,11 +1617,6 @@ icu4j 62.1 - - com.oracle - ojdbc6 
- 11.2.0.4.0 - org.dspace