Merge branch 'main' into w2p-102124_bitstream-formats-download-attachment-main
@@ -833,6 +833,24 @@
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>io.findify</groupId>
<artifactId>s3mock_2.13</artifactId>
<version>0.2.6</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>com.amazonawsl</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
</exclusion>
<exclusion>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
</exclusion>
</exclusions>
</dependency>

</dependencies>

<dependencyManagement>
@@ -896,6 +914,12 @@
<artifactId>swagger-core</artifactId>
<version>1.6.2</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>2.13.9</version>
<scope>test</scope>
</dependency>
</dependencies>
</dependencyManagement>
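The new io.findify:s3mock_2.13 test dependency (with the AWS SDK artifacts excluded to avoid version clashes, and scala-library pinned to 2.13.9) allows the S3 asset store code to be exercised against an in-memory S3 endpoint in tests. Below is a minimal, hypothetical sketch of wiring such a mock up; the port, bucket name and file content are illustrative assumptions, not part of this change.

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.AnonymousAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import io.findify.s3mock.S3Mock;

public class S3MockSketch {
    public static void main(String[] args) {
        // Start an in-memory S3 endpoint on an assumed free local port.
        S3Mock api = new S3Mock.Builder().withPort(8001).withInMemoryBackend().build();
        api.start();

        // Point a regular AmazonS3 client at the mock endpoint.
        AmazonS3 client = AmazonS3ClientBuilder.standard()
                .withPathStyleAccessEnabled(true)
                .withEndpointConfiguration(new EndpointConfiguration("http://localhost:8001", "us-east-1"))
                .withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
                .build();

        // Exercise the mock store exactly as production code would use the real service.
        client.createBucket("dspace-assetstore");
        client.putObject("dspace-assetstore", "hello.txt", "hello");
        System.out.println(client.getObjectAsString("dspace-assetstore", "hello.txt"));

        api.shutdown();
    }
}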
@@ -7,33 +7,16 @@
*/
package org.dspace.administer;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link ProcessCleaner} script.
*/
public class ProcessCleanerConfiguration<T extends ProcessCleaner> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -7,33 +7,16 @@
*/
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataDeletion} script.
*/
public class MetadataDeletionScriptConfiguration<T extends MetadataDeletion> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -7,22 +7,14 @@
*/
package org.dspace.app.bulkedit;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataExport} script
*/
public class MetadataExportScriptConfiguration<T extends MetadataExport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration<T extends MetadataExport> extends
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -9,7 +9,6 @@
package org.dspace.app.bulkedit;

import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
@@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSea
this.dspaceRunnableclass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
return true;
}

@Override
public Options getOptions() {
if (options == null) {
@@ -8,22 +8,15 @@
package org.dspace.app.bulkedit;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link MetadataImport} script
*/
public class MetadataImportScriptConfiguration<T extends MetadataImport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration<T extends MetadataImport> extends
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -7,18 +7,11 @@
*/
package org.dspace.app.harvest;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfiguration<T> {
@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@@ -32,13 +25,6 @@ public class HarvestScriptConfiguration<T extends Harvest> extends ScriptConfigu
this.dspaceRunnableClass = dspaceRunnableClass;
}

public boolean isAllowedToExecute(final Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

public Options getOptions() {
Options options = new Options();
@@ -7,14 +7,9 @@
*/
package org.dspace.app.itemexport;

import java.sql.SQLException;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link ItemExport} script
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration<T extends ItemExport> extends ScriptC
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(final Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
Options options = new Options();
@@ -332,17 +332,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
*/
protected void readZip(Context context, ItemImportService itemImportService) throws Exception {
Optional<InputStream> optionalFileStream = Optional.empty();
Optional<InputStream> validationFileStream = Optional.empty();
if (!remoteUrl) {
// manage zip via upload
optionalFileStream = handler.getFileStream(context, zipfilename);
validationFileStream = handler.getFileStream(context, zipfilename);
} else {
// manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
}

if (optionalFileStream.isPresent()) {
if (validationFileStream.isPresent()) {
// validate zip file
Optional<InputStream> validationFileStream = handler.getFileStream(context, zipfilename);
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
@@ -8,14 +8,10 @@
package org.dspace.app.itemimport;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link ItemImport} script
@@ -24,9 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration<T extends ItemImport> extends ScriptC
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(final Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
Options options = new Options();
@@ -7,25 +7,16 @@
*/
package org.dspace.app.mediafilter;

import java.sql.SQLException;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins";

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
@@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration<T extends MediaFilterScript> extends
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(final Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
Options options = new Options();
@@ -8,7 +8,6 @@
package org.dspace.app.solrdatabaseresync;

import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
@@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
return true;
}

@Override
public Options getOptions() {
if (options == null) {
@@ -22,7 +22,10 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
@@ -105,6 +108,13 @@ public class SubmissionConfigReader {
*/
private SubmissionConfig lastSubmissionConfig = null;

/**
* Collection Service instance, needed to interact with collection's
* stored data
*/
protected static final CollectionService collectionService
= ContentServiceFactory.getInstance().getCollectionService();

/**
* Load Submission Configuration from the
* item-submission.xml configuration file
@@ -152,6 +162,9 @@ public class SubmissionConfigReader {
} catch (FactoryConfigurationError fe) {
throw new SubmissionConfigReaderException(
"Cannot create Item Submission Configuration parser", fe);
} catch (SearchServiceException se) {
throw new SubmissionConfigReaderException(
"Cannot perform a discovery search for Item Submission Configuration", se);
} catch (Exception e) {
throw new SubmissionConfigReaderException(
"Error creating Item Submission Configuration: " + e);
@@ -287,7 +300,7 @@ public class SubmissionConfigReader {
* should correspond to the collection-form maps, the form definitions, and
* the display/storage word pairs.
*/
private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException {
private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException {
if (n == null) {
return;
}
@@ -334,18 +347,23 @@ public class SubmissionConfigReader {
* the collection handle and item submission name, put name in hashmap keyed
* by the collection handle.
*/
private void processMap(Node e) throws SAXException {
private void processMap(Node e) throws SAXException, SearchServiceException {
// create a context
Context context = new Context();

NodeList nl = e.getChildNodes();
int len = nl.getLength();
for (int i = 0; i < len; i++) {
Node nd = nl.item(i);
if (nd.getNodeName().equals("name-map")) {
String id = getAttribute(nd, "collection-handle");
String entityType = getAttribute(nd, "collection-entity-type");
String value = getAttribute(nd, "submission-name");
String content = getValue(nd);
if (id == null) {
if (id == null && entityType == null) {
throw new SAXException(
"name-map element is missing collection-handle attribute in 'item-submission.xml'");
"name-map element is missing collection-handle or collection-entity-type attribute " +
"in 'item-submission.xml'");
}
if (value == null) {
throw new SAXException(
@@ -355,7 +373,17 @@ public class SubmissionConfigReader {
throw new SAXException(
"name-map element has content in 'item-submission.xml', it should be empty.");
}
collectionToSubmissionConfig.put(id, value);
if (id != null) {
collectionToSubmissionConfig.put(id, value);

} else {
// get all collections for this entity-type
List<Collection> collections = collectionService.findAllCollectionsByEntityType( context,
entityType);
for (Collection collection : collections) {
collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value);
}
}
} // ignore any child node that isn't a "name-map"
}
}
@@ -635,4 +663,4 @@ public class SubmissionConfigReader {
}
return results;
}
}
}
@@ -43,6 +43,7 @@ import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
@@ -755,6 +756,19 @@ public class AuthorizeServiceImpl implements AuthorizeService {
return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
}

/**
* Checks that the context's current user is an item admin in the site by querying the solr database.
*
* @param context context with the current user
* @return true if the current user is an item admin in the site
* false when this is not the case, or an exception occurred
* @throws java.sql.SQLException passed through.
*/
@Override
public boolean isItemAdmin(Context context) throws SQLException {
return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
}

/**
* Checks that the context's current user is a community or collection admin in the site.
*
@@ -514,6 +514,15 @@ public interface AuthorizeService {
*/
boolean isCollectionAdmin(Context context) throws SQLException;

/**
* Checks that the context's current user is an item admin in the site by querying the solr database.
*
* @param context context with the current user
* @return true if the current user is an item admin in the site
* false when this is not the case, or an exception occurred
*/
boolean isItemAdmin(Context context) throws SQLException;

/**
* Checks that the context's current user is a community or collection admin in the site.
*
@@ -245,7 +245,7 @@ public final class CheckerCommand {
info.setProcessStartDate(new Date());

try {
Map checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
Map<String, Object> checksumMap = bitstreamStorageService.computeChecksum(context, info.getBitstream());
if (MapUtils.isNotEmpty(checksumMap)) {
info.setBitstreamFound(true);
if (checksumMap.containsKey("checksum")) {
@@ -255,10 +255,16 @@ public final class CheckerCommand {
if (checksumMap.containsKey("checksum_algorithm")) {
info.setChecksumAlgorithm(checksumMap.get("checksum_algorithm").toString());
}

// compare new checksum to previous checksum
info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum()));

} else {
info.setCurrentChecksum("");
info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
info.setToBeProcessed(false);
}

// compare new checksum to previous checksum
info.setChecksumResult(compareChecksums(info.getExpectedChecksum(), info.getCurrentChecksum()));
} catch (IOException e) {
// bitstream located, but file missing from asset store
info.setChecksumResult(getChecksumResultByCode(ChecksumResultCode.BITSTREAM_NOT_FOUND));
@@ -74,7 +74,8 @@ public class ChecksumHistoryServiceImpl implements ChecksumHistoryService {
if (mostRecentChecksum.getBitstream().isDeleted()) {
checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.BITSTREAM_MARKED_DELETED);
} else {
checksumResult = checksumResultService.findByCode(context, ChecksumResultCode.CHECKSUM_MATCH);
checksumResult = checksumResultService.findByCode(context,
mostRecentChecksum.getChecksumResult().getResultCode());
}

checksumHistory.setResult(checksumResult);
@@ -152,6 +152,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {

osw.write("\n");
osw.write(msg("bitstream-not-found-report"));
osw.write(" ");
osw.write(applyDateFormatShort(startDate));
osw.write(" ");
osw.write(msg("date-range-to"));
@@ -230,6 +231,7 @@ public class SimpleReporterServiceImpl implements SimpleReporterService {

osw.write("\n");
osw.write(msg("unchecked-bitstream-report"));
osw.write(" ");
osw.write(applyDateFormatShort(new Date()));
osw.write("\n\n\n");
@@ -92,8 +92,8 @@ public class MostRecentChecksumDAOImpl extends AbstractHibernateDAO<MostRecentCh
criteriaQuery.where(criteriaBuilder.and(
criteriaBuilder.equal(mostRecentResult.get(ChecksumResult_.resultCode), resultCode),
criteriaBuilder.lessThanOrEqualTo(
mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate)
mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), endDate),
criteriaBuilder.greaterThan(mostRecentChecksumRoot.get(MostRecentChecksum_.processStartDate), startDate)
)
);
List<Order> orderList = new LinkedList<>();
@@ -1047,4 +1047,24 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
return (int) resp.getTotalSearchResults();
}

@Override
@SuppressWarnings("rawtypes")
public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
throws SearchServiceException {
List<Collection> collectionList = new ArrayList<>();

DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
discoverQuery.addFilterQueries("dspace.entity.type:" + entityType);

DiscoverResult discoverResult = searchService.search(context, discoverQuery);
List<IndexableObject> solrIndexableObjects = discoverResult.getIndexableObjects();

for (IndexableObject solrCollection : solrIndexableObjects) {
Collection c = ((IndexableCollection) solrCollection).getIndexedObject();
collectionList.add(c);
}
return collectionList;
}

}
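The new findAllCollectionsByEntityType resolves collections through the Discovery (Solr) index rather than the database, so results may lag behind very recent changes. A hedged caller sketch follows; the entity type value and the factory lookup are illustrative assumptions.

import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;

public class EntityTypeCollectionsSketch {
    public static void listPublicationCollections(Context context) throws SearchServiceException {
        CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
        // Results come from the index, so they can be stale until the known caching issue is resolved.
        List<Collection> collections = collectionService.findAllCollectionsByEntityType(context, "Publication");
        for (Collection collection : collections) {
            System.out.println(collection.getHandle());
        }
    }
}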
@@ -14,6 +14,7 @@ import java.util.List;
import org.dspace.content.ProcessStatus;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessQueryParameterContainer;
@@ -97,4 +98,26 @@ public interface ProcessDAO extends GenericDAO<Process> {
List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
throws SQLException;

/**
* Returns a list of all Process objects in the database by the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @param limit The limit for the amount of Processes returned
* @param offset The offset for the Processes to be returned
* @return The list of all Process objects in the Database
* @throws SQLException If something goes wrong
*/
List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;

/**
* Count all the processes which is related to the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @return The number of results matching the query
* @throws SQLException If something goes wrong
*/
int countByUser(Context context, EPerson user) throws SQLException;

}
@@ -24,6 +24,7 @@ import org.dspace.content.ProcessStatus;
import org.dspace.content.dao.ProcessDAO;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessQueryParameterContainer;
import org.dspace.scripts.Process_;
@@ -168,6 +169,33 @@ public class ProcessDAOImpl extends AbstractHibernateDAO<Process> implements Pro
return list(context, criteriaQuery, false, Process.class, -1, -1);
}

@Override
public List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);

Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot);
criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user));

List<javax.persistence.criteria.Order> orderList = new LinkedList<>();
orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID)));
criteriaQuery.orderBy(orderList);

return list(context, criteriaQuery, false, Process.class, limit, offset);
}

@Override
public int countByUser(Context context, EPerson user) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Process> criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class);

Root<Process> processRoot = criteriaQuery.from(Process.class);
criteriaQuery.select(processRoot);
criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user));
return count(context, criteriaQuery, criteriaBuilder, processRoot);
}

}
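Together, findByUser and countByUser make a paginated "processes of this user" listing possible. The following sketch is an assumption-laden illustration: it presumes the service is obtained through ScriptServiceFactory and picks an arbitrary page size of 20.

import java.sql.SQLException;
import java.util.List;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;

public class UserProcessesSketch {
    public static void printProcesses(Context context, EPerson user) throws SQLException {
        ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
        int pageSize = 20; // illustrative page size
        int total = processService.countByUser(context, user);
        for (int offset = 0; offset < total; offset += pageSize) {
            List<Process> page = processService.findByUser(context, user, pageSize, offset);
            page.forEach(process -> System.out.println(process.getID() + " " + process.getName()));
        }
    }
}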
@@ -455,4 +455,18 @@ public interface CollectionService
public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType)
throws SQLException, SearchServiceException;

/**
* Returns a list of all collections for a specific entity type.
* NOTE: for better performance, this method retrieves its results from an index (cache)
* and does not query the database directly.
* This means that results may be stale or outdated until
* https://github.com/DSpace/DSpace/issues/2853 is resolved."
*
* @param context DSpace Context
* @param entityType limit the returned collection to those related to given entity type
* @return list of collections found
* @throws SearchServiceException if search error
*/
public List<Collection> findAllCollectionsByEntityType(Context context, String entityType)
throws SearchServiceException;
}
@@ -314,6 +314,8 @@ public class Email {
message.addRecipient(Message.RecipientType.TO, new InternetAddress(
i.next()));
}
// Get headers defined by the template.
String[] templateHeaders = config.getArrayProperty("mail.message.headers");

// Format the mail message body
VelocityEngine templateEngine = new VelocityEngine();
@@ -334,6 +336,7 @@ public class Email {
repo.putStringResource(contentName, content);
// Turn content into a template.
template = templateEngine.getTemplate(contentName);
templateHeaders = new String[] {};
}

StringWriter writer = new StringWriter();
@@ -351,8 +354,7 @@ public class Email {
message.setSentDate(date);
message.setFrom(new InternetAddress(from));

// Get headers defined by the template.
for (String headerName : config.getArrayProperty("mail.message.headers")) {
for (String headerName : templateHeaders) {
String headerValue = (String) vctx.get(headerName);
if ("subject".equalsIgnoreCase(headerName)) {
if (null != headerValue) {
@@ -8,12 +8,15 @@
package org.dspace.curate;

import java.sql.SQLException;
import java.util.List;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link Curation} script
@@ -22,9 +25,6 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class CurationScriptConfiguration<T extends Curation> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -38,16 +38,37 @@ public class CurationScriptConfiguration<T extends Curation> extends ScriptConfi
}

/**
* Only admin can run Curation script via the scripts and processes endpoints.
* @param context The relevant DSpace context
* @return True if currentUser is admin, otherwise false
* Only repository admins or admins of the target object can run Curation script via the scripts
* and processes endpoints.
*
* @param context The relevant DSpace context
* @param commandLineParameters the parameters that will be used to start the process if known,
* <code>null</code> otherwise
* @return true if the currentUser is allowed to run the script with the specified parameters or
* at least in some case if the parameters are not yet known
*/
@Override
public boolean isAllowedToExecute(Context context) {
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
try {
return authorizeService.isAdmin(context);
if (commandLineParameters == null) {
return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
|| authorizeService.isItemAdmin(context);
} else if (commandLineParameters.stream()
.map(DSpaceCommandLineParameter::getName)
.noneMatch("-i"::equals)) {
return authorizeService.isAdmin(context);
} else {
String dspaceObjectID = commandLineParameters.stream()
.filter(parameter -> "-i".equals(parameter.getName()))
.map(DSpaceCommandLineParameter::getValue)
.findFirst()
.get();
HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID);
return authorizeService.isAdmin(context, dso);
}
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
throw new RuntimeException(e);
}
}
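The reworked check makes authorization depend on the command line parameters: with unknown parameters any repository, community/collection or item admin passes; without a -i option only a site administrator passes; with -i <handle> admin rights on the resolved object are enough. A hedged illustration of the three branches follows; the parameter values, the task name and the two-argument DSpaceCommandLineParameter constructor usage are assumptions made for this sketch.

import java.util.List;
import org.dspace.core.Context;
import org.dspace.curate.Curation;
import org.dspace.curate.CurationScriptConfiguration;
import org.dspace.scripts.DSpaceCommandLineParameter;

public class CurationAuthorizationSketch {
    public static void explain(CurationScriptConfiguration<Curation> configuration, Context context) {
        // 1) Parameters unknown (e.g. when listing available scripts): any repository,
        //    community/collection or item admin sees the script.
        boolean visible = configuration.isAllowedToExecute(context, null);

        // 2) No "-i <handle>" parameter: only a full repository administrator may run it.
        List<DSpaceCommandLineParameter> noTarget =
                List.of(new DSpaceCommandLineParameter("-t", "vscan"));
        boolean siteWide = configuration.isAllowedToExecute(context, noTarget);

        // 3) "-i <handle>" present: the handle is resolved and admin rights on that object suffice.
        List<DSpaceCommandLineParameter> scoped =
                List.of(new DSpaceCommandLineParameter("-i", "123456789/42"),
                        new DSpaceCommandLineParameter("-t", "vscan"));
        boolean objectAdmin = configuration.isAllowedToExecute(context, scoped);

        System.out.printf("visible=%b siteWide=%b objectAdmin=%b%n", visible, siteWide, objectAdmin);
    }
}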
@@ -7,22 +7,14 @@
*/
package org.dspace.discovery;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link IndexClient} script
*/
public class IndexDiscoveryScriptConfiguration<T extends IndexClient> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -30,15 +22,6 @@ public class IndexDiscoveryScriptConfiguration<T extends IndexClient> extends Sc
return dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -7,13 +7,8 @@
*/
package org.dspace.orcid.script;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* Script configuration for {@link OrcidBulkPush}.
@@ -24,20 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class OrcidBulkPushScriptConfiguration<T extends OrcidBulkPush> extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableClass;
@@ -129,6 +129,11 @@ public class ProcessServiceImpl implements ProcessService {
return processes;
}

@Override
public List<Process> findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException {
return processDAO.findByUser(context, eperson, limit, offset);
}

@Override
public void start(Context context, Process process) throws SQLException {
process.setProcessStatus(ProcessStatus.RUNNING);
@@ -311,6 +316,11 @@ public class ProcessServiceImpl implements ProcessService {
return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date);
}

@Override
public int countByUser(Context context, EPerson user) throws SQLException {
return processDAO.countByUser(context, user);
}

private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
StringBuilder sb = new StringBuilder();
@@ -37,7 +37,7 @@ public class ScriptServiceImpl implements ScriptService {
@Override
public List<ScriptConfiguration> getScriptConfigurations(Context context) {
return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter(
scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context))
scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null))
.sorted(Comparator.comparing(ScriptConfiguration::getName))
.collect(Collectors.toList());
}
@@ -7,17 +7,28 @@
*/
package org.dspace.scripts.configuration;

import java.sql.SQLException;
import java.util.List;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.DSpaceRunnable;
import org.springframework.beans.factory.BeanNameAware;
import org.springframework.beans.factory.annotation.Autowired;

/**
* This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this
* and represent a script's configuration
* and represent a script's configuration.
* By default script are available only to repository administrators script that have a broader audience
* must override the {@link #isAllowedToExecute(Context, List)} method.
*/
public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements BeanNameAware {

@Autowired
protected AuthorizeService authorizeService;

/**
* The possible options for this script
*/
@@ -70,14 +81,23 @@ public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements B
* @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration
*/
public abstract void setDspaceRunnableClass(Class<T> dspaceRunnableClass);

/**
* This method will return if the script is allowed to execute in the given context. This is by default set
* to the currentUser in the context being an admin, however this can be overwritten by each script individually
* if different rules apply
* @param context The relevant DSpace context
* @param commandLineParameters the parameters that will be used to start the process if known,
* <code>null</code> otherwise
* @return A boolean indicating whether the script is allowed to execute or not
*/
public abstract boolean isAllowedToExecute(Context context);
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

/**
* The getter for the options of the Script
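Because the base class now ships an admin-only default, a configuration only overrides isAllowedToExecute when its script has a broader audience. A hypothetical subclass is sketched below; the class name is made up, and a configuration that is happy with the admin-only rule would simply omit the override and inherit the default shown above.

import java.util.List;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;

/** Illustrative configuration for a script that every user may run. */
class EveryoneExampleScriptConfiguration<T extends DSpaceRunnable> extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
        // Open the script to everyone instead of keeping the inherited admin-only default.
        return true;
    }

    @Override
    public Options getOptions() {
        if (options == null) {
            options = new Options();
        }
        return options;
    }
}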
@@ -255,4 +255,26 @@ public interface ProcessService {
*/
List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
throws SQLException;

/**
* Returns a list of all Process objects in the database by the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @param limit The limit for the amount of Processes returned
* @param offset The offset for the Processes to be returned
* @return The list of all Process objects in the Database
* @throws SQLException If something goes wrong
*/
List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;

/**
* Count all the processes which is related to the given user.
*
* @param context The relevant DSpace context
* @param user The user to search for
* @return The number of results matching the query
* @throws SQLException If something goes wrong
*/
int countByUser(Context context, EPerson user) throws SQLException;
}
@@ -7,13 +7,8 @@
*/
package org.dspace.statistics.export;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script
@@ -21,9 +16,6 @@ import org.springframework.beans.factory.annotation.Autowired;
public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedOpenUrlTracker>
extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -41,15 +33,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration<T extends RetryFailedO
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {
@@ -14,6 +14,8 @@ import java.io.IOException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
@@ -153,22 +155,24 @@ public abstract class BaseBitStoreService implements BitStoreService {
* Retrieves a map of useful metadata about the File (size, checksum, modified)
*
* @param file The File to analyze
* @param attrs The map where we are storing values
* @param attrs The list of requested metadata values
* @return Map of updated metadatas / attrs
* @throws IOException
*/
public Map about(File file, Map attrs) throws IOException {
public Map<String, Object> about(File file, List<String> attrs) throws IOException {

Map<String, Object> metadata = new HashMap<String, Object>();

try {
if (file != null && file.exists()) {
this.putValueIfExistsKey(attrs, SIZE_BYTES, file.length());
if (attrs.containsKey(CHECKSUM)) {
attrs.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file)));
attrs.put(CHECKSUM_ALGORITHM, CSA);
this.putValueIfExistsKey(attrs, metadata, SIZE_BYTES, file.length());
if (attrs.contains(CHECKSUM)) {
metadata.put(CHECKSUM, Utils.toHex(this.generateChecksumFrom(file)));
metadata.put(CHECKSUM_ALGORITHM, CSA);
}
this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(file.lastModified()));
return attrs;
this.putValueIfExistsKey(attrs, metadata, MODIFIED, String.valueOf(file.lastModified()));
}
return null;
return metadata;
} catch (Exception e) {
log.error("about( FilePath: " + file.getAbsolutePath() + ", Map: " + attrs.toString() + ")", e);
throw new IOException(e);
@@ -204,13 +208,9 @@ public abstract class BaseBitStoreService implements BitStoreService {
}
}

protected void putValueIfExistsKey(Map attrs, String key, Object value) {
this.putEntryIfExistsKey(attrs, key, Map.entry(key, value));
}

protected void putEntryIfExistsKey(Map attrs, String key, Map.Entry entry) {
if (attrs.containsKey(key)) {
attrs.put(entry.getKey(), entry.getValue());
protected void putValueIfExistsKey(List<String> attrs, Map<String, Object> metadata, String key, Object value) {
if (attrs.contains(key)) {
metadata.put(key, value);
}
}
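After this refactoring, callers request metadata by naming the attributes they want instead of priming a Map with null values, and they read the results from the returned map. A small hedged usage sketch follows; the store instance and bitstream are assumed to be supplied by the surrounding service code.

import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.dspace.content.Bitstream;
import org.dspace.storage.bitstore.BitStoreService;

public class AboutUsageSketch {
    public static void printSizeAndChecksum(BitStoreService store, Bitstream bitstream) throws IOException {
        // Ask only for the attributes we need; the returned map contains just the resolvable ones.
        Map<String, Object> metadata =
                store.about(bitstream, List.of("size_bytes", "checksum", "checksum_algorithm"));
        System.out.println("size: " + metadata.get("size_bytes"));
        System.out.println(metadata.get("checksum_algorithm") + ": " + metadata.get("checksum"));
    }
}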
@@ -9,6 +9,7 @@ package org.dspace.storage.bitstore;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;

import org.dspace.content.Bitstream;
@@ -62,13 +63,13 @@ public interface BitStoreService {
* Obtain technical metadata about an asset in the asset store.
*
* @param bitstream The bitstream to describe
* @param attrs A Map whose keys consist of desired metadata fields
* @param attrs A List of desired metadata fields
* @return attrs
* A Map with key/value pairs of desired metadata
* If file not found, then return null
* @throws java.io.IOException If a problem occurs while obtaining metadata
*/
public Map about(Bitstream bitstream, Map attrs) throws IOException;
public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException;

/**
* Remove an asset from the asset store.
@@ -166,12 +166,9 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
bitstream.setStoreNumber(assetstore);
bitstreamService.update(context, bitstream);

Map wantedMetadata = new HashMap();
wantedMetadata.put("size_bytes", null);
wantedMetadata.put("checksum", null);
wantedMetadata.put("checksum_algorithm", null);
List<String> wantedMetadata = List.of("size_bytes", "checksum", "checksum_algorithm");
Map<String, Object> receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata);

Map receivedMetadata = this.getStore(assetstore).about(bitstream, wantedMetadata);
if (MapUtils.isEmpty(receivedMetadata)) {
String message = "Not able to register bitstream:" + bitstream.getID() + " at path: " + bitstreamPath;
log.error(message);
@@ -201,13 +198,8 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
}

@Override
public Map computeChecksum(Context context, Bitstream bitstream) throws IOException {
Map wantedMetadata = new HashMap();
wantedMetadata.put("checksum", null);
wantedMetadata.put("checksum_algorithm", null);

Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
return receivedMetadata;
public Map<String, Object> computeChecksum(Context context, Bitstream bitstream) throws IOException {
return this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("checksum", "checksum_algorithm"));
}

@Override
@@ -247,10 +239,9 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini

for (Bitstream bitstream : storage) {
UUID bid = bitstream.getID();
Map wantedMetadata = new HashMap();
wantedMetadata.put("size_bytes", null);
wantedMetadata.put("modified", null);
Map receivedMetadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, wantedMetadata);
List<String> wantedMetadata = List.of("size_bytes", "modified");
Map<String, Object> receivedMetadata = this.getStore(bitstream.getStoreNumber())
.about(bitstream, wantedMetadata);

// Make sure entries which do not exist are removed
@@ -348,13 +339,11 @@ public class BitstreamStorageServiceImpl implements BitstreamStorageService, Ini
@Nullable
@Override
public Long getLastModified(Bitstream bitstream) throws IOException {
Map attrs = new HashMap();
attrs.put("modified", null);
attrs = this.getStore(bitstream.getStoreNumber()).about(bitstream, attrs);
if (attrs == null || !attrs.containsKey("modified")) {
Map<String, Object> metadata = this.getStore(bitstream.getStoreNumber()).about(bitstream, List.of("modified"));
if (metadata == null || !metadata.containsKey("modified")) {
return null;
}
return Long.valueOf(attrs.get("modified").toString());
return Long.valueOf(metadata.get("modified").toString());
}

/**
@@ -15,6 +15,7 @@ import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.Logger;
@@ -126,13 +127,13 @@ public class DSBitStoreService extends BaseBitStoreService {
/**
* Obtain technical metadata about an asset in the asset store.
*
* @param bitstream The asset to describe
* @param attrs A Map whose keys consist of desired metadata fields
* @return attrs
* A Map with key/value pairs of desired metadata
* @throws java.io.IOException If a problem occurs while obtaining metadata
* @param bitstream The asset to describe
* @param attrs A List of desired metadata fields
* @return attrs A Map with key/value pairs of desired metadata
* @throws java.io.IOException If a problem occurs while obtaining
* metadata
*/
public Map about(Bitstream bitstream, Map attrs) throws IOException {
public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException {
try {
// potentially expensive, since it may calculate the checksum
File file = getFile(bitstream);
@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.storage.bitstore;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;

/**
* When inputstream closes, then delete the file
* http://stackoverflow.com/a/4694155/368581
*/
public class DeleteOnCloseFileInputStream extends FileInputStream {

private File file;

public DeleteOnCloseFileInputStream(String fileName) throws FileNotFoundException {
this(new File(fileName));
}

public DeleteOnCloseFileInputStream(File file) throws FileNotFoundException {
super(file);
this.file = file;
}

public void close() throws IOException {
try {
super.close();
} finally {
if (file != null) {
file.delete();
file = null;
}
}
}
}
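This wrapper lets the S3 get() path hand out a stream over a temporary disk copy without leaking the file once the caller closes the stream. A short, self-contained usage sketch outside the S3 context (file name and content are made up):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import org.dspace.storage.bitstore.DeleteOnCloseFileInputStream;

public class DeleteOnCloseSketch {
    public static void main(String[] args) throws IOException {
        File temp = File.createTempFile("example-", ".tmp");
        Files.writeString(temp.toPath(), "payload");

        // Reading through the wrapper behaves like a normal FileInputStream...
        try (InputStream in = new DeleteOnCloseFileInputStream(temp)) {
            System.out.println(new String(in.readAllBytes()));
        }
        // ...but closing it also removes the backing temporary file.
        System.out.println("still exists? " + temp.exists());
    }
}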
@@ -7,10 +7,19 @@
|
||||
*/
|
||||
package org.dspace.storage.bitstore;
|
||||
|
||||
import static java.lang.String.valueOf;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.security.DigestInputStream;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
import java.util.function.Supplier;
|
||||
import javax.validation.constraints.NotNull;
|
||||
|
||||
@@ -21,12 +30,11 @@ import com.amazonaws.auth.BasicAWSCredentials;
|
||||
import com.amazonaws.regions.Region;
|
||||
import com.amazonaws.regions.Regions;
|
||||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import com.amazonaws.services.s3.AmazonS3Client;
|
||||
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
|
||||
import com.amazonaws.services.s3.model.AmazonS3Exception;
|
||||
import com.amazonaws.services.s3.model.GetObjectRequest;
|
||||
import com.amazonaws.services.s3.model.ObjectMetadata;
|
||||
import com.amazonaws.services.s3.model.S3Object;
|
||||
import com.amazonaws.services.s3.transfer.Download;
|
||||
import com.amazonaws.services.s3.transfer.TransferManager;
|
||||
import com.amazonaws.services.s3.transfer.TransferManagerBuilder;
|
||||
import com.amazonaws.services.s3.transfer.Upload;
|
||||
@@ -36,7 +44,7 @@ import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Option;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.commons.cli.ParseException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.output.NullOutputStream;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
@@ -72,7 +80,7 @@ public class S3BitStoreService extends BaseBitStoreService {
|
||||
/**
|
||||
* Checksum algorithm
|
||||
*/
|
||||
private static final String CSA = "MD5";
|
||||
static final String CSA = "MD5";
|
||||
|
||||
// These settings control the way an identifier is hashed into
|
||||
// directory and file names
|
||||
@@ -139,13 +147,11 @@ public class S3BitStoreService extends BaseBitStoreService {
|
||||
|
||||
/**
|
||||
* This constructor is used for test purpose.
|
||||
* In this way is possible to use a mocked instance of AmazonS3
|
||||
*
|
||||
* @param s3Service mocked AmazonS3 service
|
||||
* @param s3Service AmazonS3 service
|
||||
*/
|
||||
protected S3BitStoreService(AmazonS3 s3Service, TransferManager tm) {
|
||||
protected S3BitStoreService(AmazonS3 s3Service) {
|
||||
this.s3Service = s3Service;
|
||||
this.tm = tm;
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -205,7 +211,7 @@ public class S3BitStoreService extends BaseBitStoreService {
}

try {
if (!s3Service.doesBucketExist(bucketName)) {
if (!s3Service.doesBucketExistV2(bucketName)) {
s3Service.createBucket(bucketName);
log.info("Creating new S3 Bucket: " + bucketName);
}
@@ -253,9 +259,16 @@ public class S3BitStoreService extends BaseBitStoreService {
key = key.substring(REGISTERED_FLAG.length());
}
try {
S3Object object = s3Service.getObject(new GetObjectRequest(bucketName, key));
return (object != null) ? object.getObjectContent() : null;
} catch (AmazonClientException e) {
File tempFile = File.createTempFile("s3-disk-copy-" + UUID.randomUUID(), "temp");
tempFile.deleteOnExit();

GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, key);

Download download = tm.download(getObjectRequest, tempFile);
download.waitForCompletion();

return new DeleteOnCloseFileInputStream(tempFile);
} catch (AmazonClientException | InterruptedException e) {
log.error("get(" + key + ")", e);
throw new IOException(e);
}
@@ -277,24 +290,30 @@ public class S3BitStoreService extends BaseBitStoreService {
String key = getFullKey(bitstream.getInternalId());
//Copy input stream to temp file, and send the file, with some metadata
File scratchFile = File.createTempFile(bitstream.getInternalId(), "s3bs");
try {
FileUtils.copyInputStreamToFile(in, scratchFile);
long contentLength = scratchFile.length();
// The ETag may or may not be an MD5 digest of the object data.
// Therefore, we precalculate before uploading
String localChecksum = org.dspace.curate.Utils.checksum(scratchFile, CSA);
try (
FileOutputStream fos = new FileOutputStream(scratchFile);
// Read through a digest input stream that will work out the MD5
DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA));
) {
Utils.bufferedCopy(dis, fos);
in.close();

Upload upload = tm.upload(bucketName, key, scratchFile);

upload.waitForUploadResult();

bitstream.setSizeBytes(contentLength);
bitstream.setChecksum(localChecksum);
bitstream.setSizeBytes(scratchFile.length());
// we cannot use the S3 ETag here as it may not be an MD5 in case of multipart upload (large files) or if
// the bucket is encrypted
bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest()));
bitstream.setChecksumAlgorithm(CSA);

} catch (AmazonClientException | IOException | InterruptedException e) {
log.error("put(" + bitstream.getInternalId() + ", is)", e);
throw new IOException(e);
} catch (NoSuchAlgorithmException nsae) {
// Should never happen
log.warn("Caught NoSuchAlgorithmException", nsae);
} finally {
if (!scratchFile.delete()) {
scratchFile.deleteOnExit();
@@ -309,61 +328,56 @@ public class S3BitStoreService extends BaseBitStoreService {
* (Does not use getContentMD5, as that is 128-bit MD5 digest calculated on caller's side)
*
* @param bitstream The asset to describe
* @param attrs A Map whose keys consist of desired metadata fields
* @param attrs A List of desired metadata fields
* @return attrs
* A Map with key/value pairs of desired metadata
* If file not found, then return null
* @throws java.io.IOException If a problem occurs while obtaining metadata
*/
@Override
public Map about(Bitstream bitstream, Map attrs) throws IOException {
public Map<String, Object> about(Bitstream bitstream, List<String> attrs) throws IOException {

String key = getFullKey(bitstream.getInternalId());
// If this is a registered bitstream, strip the -R prefix before retrieving
if (isRegisteredBitstream(key)) {
key = key.substring(REGISTERED_FLAG.length());
}

Map<String, Object> metadata = new HashMap<>();

try {

ObjectMetadata objectMetadata = s3Service.getObjectMetadata(bucketName, key);
if (objectMetadata != null) {
return this.about(objectMetadata, attrs);
putValueIfExistsKey(attrs, metadata, "size_bytes", objectMetadata.getContentLength());
putValueIfExistsKey(attrs, metadata, "modified", valueOf(objectMetadata.getLastModified().getTime()));
}

putValueIfExistsKey(attrs, metadata, "checksum_algorithm", CSA);

if (attrs.contains("checksum")) {
try (InputStream in = get(bitstream);
DigestInputStream dis = new DigestInputStream(in, MessageDigest.getInstance(CSA))
) {
Utils.copy(dis, NullOutputStream.NULL_OUTPUT_STREAM);
byte[] md5Digest = dis.getMessageDigest().digest();
metadata.put("checksum", Utils.toHex(md5Digest));
} catch (NoSuchAlgorithmException nsae) {
// Should never happen
log.warn("Caught NoSuchAlgorithmException", nsae);
}
}

return metadata;
} catch (AmazonS3Exception e) {
if (e.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
return null;
return metadata;
}
} catch (AmazonClientException e) {
log.error("about(" + key + ", attrs)", e);
throw new IOException(e);
}
return null;
}

/**
* Populates map values by checking key existence
* <br>
* Adds technical metadata about an asset in the asset store, like:
* <ul>
* <li>size_bytes</li>
* <li>checksum</li>
* <li>checksum_algorithm</li>
* <li>modified</li>
* </ul>
*
* @param objectMetadata containing technical data
* @param attrs map with keys populated
* @return Map of enriched attrs with values
*/
public Map about(ObjectMetadata objectMetadata, Map attrs) {
if (objectMetadata != null) {
this.putValueIfExistsKey(attrs, SIZE_BYTES, objectMetadata.getContentLength());

// put CHECKSUM_ALGORITHM if CHECKSUM exists
this.putValueIfExistsKey(attrs, CHECKSUM, objectMetadata.getETag());
this.putEntryIfExistsKey(attrs, CHECKSUM, Map.entry(CHECKSUM_ALGORITHM, CSA));

this.putValueIfExistsKey(attrs, MODIFIED, String.valueOf(objectMetadata.getLastModified().getTime()));
}
return attrs;
return metadata;
}

/**
@@ -527,13 +541,14 @@ public class S3BitStoreService extends BaseBitStoreService {

String accessKey = command.getOptionValue("a");
String secretKey = command.getOptionValue("s");
String assetFile = command.getOptionValue("f");

S3BitStoreService store = new S3BitStoreService();

AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);

store.s3Service = new AmazonS3Client(awsCredentials);
store.s3Service = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(awsCredentials))
.build();

//Todo configurable region
Region usEast1 = Region.getRegion(Regions.US_EAST_1);

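The rewritten put() above spools the incoming stream to a scratch file while computing the MD5 locally, because the S3 ETag is not a reliable MD5 for multipart or encrypted uploads. The following is a minimal, self-contained sketch of that single-pass copy-and-digest pattern using only JDK classes; it is an illustration, not DSpace code, and the class and method names are hypothetical.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChecksumWhileCopying {

    // Copies 'in' to 'scratch' and returns the hex MD5 of everything read,
    // so the checksum is computed in the same pass as the spooling to disk.
    static String copyAndDigest(InputStream in, File scratch) throws IOException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        try (DigestInputStream dis = new DigestInputStream(in, md5);
             OutputStream out = new FileOutputStream(scratch)) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = dis.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        }
        // Hex-encode the digest, analogous to Utils.toHex in the patch above.
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString();
    }
}
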
@@ -102,7 +102,7 @@ public interface BitstreamStorageService {
public UUID register(Context context, Bitstream bitstream, int assetstore, String bitstreamPath)
throws SQLException, IOException, AuthorizeException;

public Map computeChecksum(Context context, Bitstream bitstream) throws IOException;
public Map<String, Object> computeChecksum(Context context, Bitstream bitstream) throws IOException;

/**
* Does the internal_id column in the bitstream row indicate the bitstream

@@ -7,13 +7,8 @@
*/
package org.dspace.submit.migration;

import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* The {@link ScriptConfiguration} for the {@link SubmissionFormsMigration} script
@@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired;
public class SubmissionFormsMigrationCliScriptConfiguration<T extends SubmissionFormsMigration>
extends ScriptConfiguration<T> {

@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -38,15 +30,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration<T extends Submission
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {

@@ -7,7 +7,12 @@
*/
package org.dspace.submit.migration;

import java.util.List;

import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.configuration.ScriptConfiguration;

/**
* Subclass of {@link SubmissionFormsMigrationCliScriptConfiguration} to be used in rest/scripts.xml configuration so
@@ -15,10 +20,37 @@ import org.dspace.core.Context;
*
* @author Maria Verdonck (Atmire) on 05/01/2021
*/
public class SubmissionFormsMigrationScriptConfiguration extends SubmissionFormsMigrationCliScriptConfiguration {
public class SubmissionFormsMigrationScriptConfiguration<T extends SubmissionFormsMigration>
extends ScriptConfiguration<T> {

private Class<T> dspaceRunnableClass;

@Override
public boolean isAllowedToExecute(Context context) {
public Class<T> getDspaceRunnableClass() {
return this.dspaceRunnableClass;
}

@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();

options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location");
options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location");
options.addOption("h", "help", false, "help");

super.options = options;
}
return options;
}

@Override
public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
// Script is not allowed to be executed from REST side
return false;
}

@@ -8,15 +8,11 @@

package org.dspace.subscriptions;

import java.sql.SQLException;
import java.util.Objects;

import org.apache.commons.cli.Options;
import org.dspace.authorize.AuthorizeServiceImpl;
import org.dspace.core.Context;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.springframework.beans.factory.annotation.Autowired;

/**
* Implementation of {@link DSpaceRunnable} to find subscribed objects and send notification mails about them
@@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration<T

private Class<T> dspaceRunnableClass;

@Autowired
private AuthorizeServiceImpl authorizeService;

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (Objects.isNull(options)) {

@@ -162,8 +162,8 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
return (B) this;
}
/**
* Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other
* READ permissions will be removed
* Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson.
* If another ADMIN policy is in place for an eperson it will be replaced
*
* @param dso
* the DSpaceObject on which to grant the permission

@@ -353,9 +353,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
}

/**
* Create an admin group for the collection with the specified members
* Assign the admin permission to the specified eperson
*
* @param ePerson epersons to add to the admin group
* @param ePerson the eperson that will get the ADMIN permission on the item
* @return this builder
* @throws SQLException
* @throws AuthorizeException

@@ -113,6 +113,9 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
}

public static void deleteProcess(Integer integer) throws SQLException, IOException {
if (integer == null) {
return;
}
try (Context c = new Context()) {
c.turnOffAuthorisationSystem();
Process process = processService.find(c, integer);

@@ -8,21 +8,13 @@
package org.dspace.scripts;

import java.io.InputStream;
import java.sql.SQLException;

import org.apache.commons.cli.Options;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
import org.dspace.scripts.impl.MockDSpaceRunnableScript;
import org.springframework.beans.factory.annotation.Autowired;

public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableScript> extends ScriptConfiguration<T> {


@Autowired
private AuthorizeService authorizeService;

private Class<T> dspaceRunnableClass;

@Override
@@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableS
this.dspaceRunnableClass = dspaceRunnableClass;
}

@Override
public boolean isAllowedToExecute(Context context) {
try {
return authorizeService.isAdmin(context);
} catch (SQLException e) {
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
}
}

@Override
public Options getOptions() {
if (options == null) {

@@ -0,0 +1,434 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.storage.bitstore;
|
||||
|
||||
import static com.amazonaws.regions.Regions.DEFAULT_REGION;
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
import static org.dspace.storage.bitstore.S3BitStoreService.CSA;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasEntry;
|
||||
import static org.hamcrest.Matchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.startsWith;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThrows;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.sql.SQLException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.amazonaws.auth.AWSStaticCredentialsProvider;
|
||||
import com.amazonaws.auth.AnonymousAWSCredentials;
|
||||
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
|
||||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
|
||||
import com.amazonaws.services.s3.model.AmazonS3Exception;
|
||||
import com.amazonaws.services.s3.model.Bucket;
|
||||
import com.amazonaws.services.s3.model.ObjectMetadata;
|
||||
import io.findify.s3mock.S3Mock;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.dspace.AbstractIntegrationTestWithDatabase;
|
||||
import org.dspace.app.matcher.LambdaMatcher;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.builder.BitstreamBuilder;
|
||||
import org.dspace.builder.CollectionBuilder;
|
||||
import org.dspace.builder.CommunityBuilder;
|
||||
import org.dspace.builder.ItemBuilder;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.core.Utils;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
||||
/**
|
||||
* @author Luca Giamminonni (luca.giamminonni at 4science.com)
|
||||
*/
|
||||
public class S3BitStoreServiceIT extends AbstractIntegrationTestWithDatabase {
|
||||
|
||||
private static final String DEFAULT_BUCKET_NAME = "dspace-asset-localhost";
|
||||
|
||||
private S3BitStoreService s3BitStoreService;
|
||||
|
||||
private AmazonS3 amazonS3Client;
|
||||
|
||||
private S3Mock s3Mock;
|
||||
|
||||
private Collection collection;
|
||||
|
||||
private File s3Directory;
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception {
|
||||
|
||||
s3Directory = new File(System.getProperty("java.io.tmpdir"), "s3");
|
||||
|
||||
s3Mock = S3Mock.create(8001, s3Directory.getAbsolutePath());
|
||||
s3Mock.start();
|
||||
|
||||
amazonS3Client = createAmazonS3Client();
|
||||
|
||||
s3BitStoreService = new S3BitStoreService(amazonS3Client);
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
|
||||
parentCommunity = CommunityBuilder.createCommunity(context)
|
||||
.build();
|
||||
|
||||
collection = CollectionBuilder.createCollection(context, parentCommunity)
|
||||
.build();
|
||||
|
||||
context.restoreAuthSystemState();
|
||||
}
|
||||
|
||||
@After
|
||||
public void cleanUp() throws IOException {
|
||||
FileUtils.deleteDirectory(s3Directory);
|
||||
s3Mock.shutdown();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBitstreamPutAndGetWithAlreadyPresentBucket() throws IOException {
|
||||
|
||||
String bucketName = "testbucket";
|
||||
|
||||
amazonS3Client.createBucket(bucketName);
|
||||
|
||||
s3BitStoreService.setBucketName(bucketName);
|
||||
s3BitStoreService.init();
|
||||
|
||||
assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(bucketName)));
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
String content = "Test bitstream content";
|
||||
Bitstream bitstream = createBitstream(content);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
s3BitStoreService.put(bitstream, toInputStream(content));
|
||||
|
||||
String expectedChecksum = Utils.toHex(generateChecksum(content));
|
||||
|
||||
assertThat(bitstream.getSizeBytes(), is((long) content.length()));
|
||||
assertThat(bitstream.getChecksum(), is(expectedChecksum));
|
||||
assertThat(bitstream.getChecksumAlgorithm(), is(CSA));
|
||||
|
||||
InputStream inputStream = s3BitStoreService.get(bitstream);
|
||||
assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
|
||||
|
||||
String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
|
||||
ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(bucketName, key);
|
||||
assertThat(objectMetadata.getContentMD5(), is(expectedChecksum));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBitstreamPutAndGetWithoutSpecifingBucket() throws IOException {
|
||||
|
||||
s3BitStoreService.init();
|
||||
|
||||
assertThat(s3BitStoreService.getBucketName(), is(DEFAULT_BUCKET_NAME));
|
||||
|
||||
assertThat(amazonS3Client.listBuckets(), contains(bucketNamed(DEFAULT_BUCKET_NAME)));
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
String content = "Test bitstream content";
|
||||
Bitstream bitstream = createBitstream(content);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
s3BitStoreService.put(bitstream, toInputStream(content));
|
||||
|
||||
String expectedChecksum = Utils.toHex(generateChecksum(content));
|
||||
|
||||
assertThat(bitstream.getSizeBytes(), is((long) content.length()));
|
||||
assertThat(bitstream.getChecksum(), is(expectedChecksum));
|
||||
assertThat(bitstream.getChecksumAlgorithm(), is(CSA));
|
||||
|
||||
InputStream inputStream = s3BitStoreService.get(bitstream);
|
||||
assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
|
||||
|
||||
String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
|
||||
ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key);
|
||||
assertThat(objectMetadata.getContentMD5(), is(expectedChecksum));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBitstreamPutAndGetWithSubFolder() throws IOException {
|
||||
|
||||
s3BitStoreService.setSubfolder("test/DSpace7/");
|
||||
s3BitStoreService.init();
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
String content = "Test bitstream content";
|
||||
Bitstream bitstream = createBitstream(content);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
s3BitStoreService.put(bitstream, toInputStream(content));
|
||||
|
||||
InputStream inputStream = s3BitStoreService.get(bitstream);
|
||||
assertThat(IOUtils.toString(inputStream, UTF_8), is(content));
|
||||
|
||||
String key = s3BitStoreService.getFullKey(bitstream.getInternalId());
|
||||
assertThat(key, startsWith("test/DSpace7/"));
|
||||
|
||||
ObjectMetadata objectMetadata = amazonS3Client.getObjectMetadata(DEFAULT_BUCKET_NAME, key);
|
||||
assertThat(objectMetadata, notNullValue());
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBitstreamDeletion() throws IOException {
|
||||
|
||||
s3BitStoreService.init();
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
String content = "Test bitstream content";
|
||||
Bitstream bitstream = createBitstream(content);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
s3BitStoreService.put(bitstream, toInputStream(content));
|
||||
|
||||
assertThat(s3BitStoreService.get(bitstream), notNullValue());
|
||||
|
||||
s3BitStoreService.remove(bitstream);
|
||||
|
||||
IOException exception = assertThrows(IOException.class, () -> s3BitStoreService.get(bitstream));
|
||||
assertThat(exception.getCause(), instanceOf(AmazonS3Exception.class));
|
||||
assertThat(((AmazonS3Exception) exception.getCause()).getStatusCode(), is(404));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAbout() throws IOException {
|
||||
|
||||
s3BitStoreService.init();
|
||||
|
||||
context.turnOffAuthorisationSystem();
|
||||
String content = "Test bitstream content";
|
||||
Bitstream bitstream = createBitstream(content);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
s3BitStoreService.put(bitstream, toInputStream(content));
|
||||
|
||||
Map<String, Object> about = s3BitStoreService.about(bitstream, List.of());
|
||||
assertThat(about.size(), is(0));
|
||||
|
||||
about = s3BitStoreService.about(bitstream, List.of("size_bytes"));
|
||||
assertThat(about, hasEntry("size_bytes", 22L));
|
||||
assertThat(about.size(), is(1));
|
||||
|
||||
about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified"));
|
||||
assertThat(about, hasEntry("size_bytes", 22L));
|
||||
assertThat(about, hasEntry(is("modified"), notNullValue()));
|
||||
assertThat(about.size(), is(2));
|
||||
|
||||
String expectedChecksum = Utils.toHex(generateChecksum(content));
|
||||
|
||||
about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum"));
|
||||
assertThat(about, hasEntry("size_bytes", 22L));
|
||||
assertThat(about, hasEntry(is("modified"), notNullValue()));
|
||||
assertThat(about, hasEntry("checksum", expectedChecksum));
|
||||
assertThat(about.size(), is(3));
|
||||
|
||||
about = s3BitStoreService.about(bitstream, List.of("size_bytes", "modified", "checksum", "checksum_algorithm"));
|
||||
assertThat(about, hasEntry("size_bytes", 22L));
|
||||
assertThat(about, hasEntry(is("modified"), notNullValue()));
|
||||
assertThat(about, hasEntry("checksum", expectedChecksum));
|
||||
assertThat(about, hasEntry("checksum_algorithm", CSA));
|
||||
assertThat(about.size(), is(4));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void handleRegisteredIdentifierPrefixInS3() {
|
||||
String trueBitStreamId = "012345";
|
||||
String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId;
|
||||
// Should be detected as registered bitstream
|
||||
assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void stripRegisteredBitstreamPrefixWhenCalculatingPath() {
|
||||
// Set paths and IDs
|
||||
String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf";
|
||||
String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path;
|
||||
// Paths should be equal, since the getRelativePath method should strip the registered -R prefix
|
||||
String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId);
|
||||
assertEquals(s3Path, relativeRegisteredPath);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() {
|
||||
String path = "01234567890123456789";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() {
|
||||
String path = "0";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "0" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() {
|
||||
String path = "01234";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() {
|
||||
String path = "012345";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException {
|
||||
StringBuilder path = new StringBuilder("01");
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
int slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("2");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("3");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("4");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("56789");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException {
|
||||
StringBuilder path = new StringBuilder("01");
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
int slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("2");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("3");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("4");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("56789");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() {
|
||||
String sInternalId = new StringBuilder("01")
|
||||
.append(File.separator)
|
||||
.append("22")
|
||||
.append(File.separator)
|
||||
.append("33")
|
||||
.append(File.separator)
|
||||
.append("4455")
|
||||
.toString();
|
||||
String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId);
|
||||
assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator)));
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator)));
|
||||
assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator)));
|
||||
}
|
||||
|
||||
private byte[] generateChecksum(String content) {
|
||||
try {
|
||||
MessageDigest m = MessageDigest.getInstance("MD5");
|
||||
m.update(content.getBytes());
|
||||
return m.digest();
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private AmazonS3 createAmazonS3Client() {
|
||||
return AmazonS3ClientBuilder.standard()
|
||||
.withCredentials(new AWSStaticCredentialsProvider(new AnonymousAWSCredentials()))
|
||||
.withEndpointConfiguration(new EndpointConfiguration("http://127.0.0.1:8001", DEFAULT_REGION.getName()))
|
||||
.build();
|
||||
}
|
||||
|
||||
private Item createItem() {
|
||||
return ItemBuilder.createItem(context, collection)
|
||||
.withTitle("Test item")
|
||||
.build();
|
||||
}
|
||||
|
||||
private Bitstream createBitstream(String content) {
|
||||
try {
|
||||
return BitstreamBuilder
|
||||
.createBitstream(context, createItem(), toInputStream(content))
|
||||
.build();
|
||||
} catch (SQLException | AuthorizeException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
private Matcher<? super Bucket> bucketNamed(String name) {
|
||||
return LambdaMatcher.matches(bucket -> bucket.getName().equals(name));
|
||||
}
|
||||
|
||||
private InputStream toInputStream(String content) {
|
||||
return IOUtils.toInputStream(content, UTF_8);
|
||||
}
|
||||
|
||||
private int computeSlashes(String internalId) {
|
||||
int minimum = internalId.length();
|
||||
int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel;
|
||||
int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel);
|
||||
int slashes = slashesPerLevel + odd;
|
||||
return Math.min(slashes, S3BitStoreService.directoryLevels);
|
||||
}
|
||||
|
||||
}
|
@@ -1,480 +0,0 @@
|
||||
/**
|
||||
* The contents of this file are subject to the license and copyright
|
||||
* detailed in the LICENSE and NOTICE files at the root of the source
|
||||
* tree and available online at
|
||||
*
|
||||
* http://www.dspace.org/license/
|
||||
*/
|
||||
package org.dspace.storage.bitstore;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.isEmptyOrNullString;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertThrows;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.ArgumentMatchers.startsWith;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import com.amazonaws.regions.Regions;
|
||||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import com.amazonaws.services.s3.AmazonS3Client;
|
||||
import com.amazonaws.services.s3.model.GetObjectRequest;
|
||||
import com.amazonaws.services.s3.model.PutObjectRequest;
|
||||
import com.amazonaws.services.s3.model.PutObjectResult;
|
||||
import com.amazonaws.services.s3.model.S3Object;
|
||||
import com.amazonaws.services.s3.model.S3ObjectInputStream;
|
||||
import com.amazonaws.services.s3.transfer.TransferManager;
|
||||
import com.amazonaws.services.s3.transfer.Upload;
|
||||
import com.amazonaws.services.s3.transfer.model.UploadResult;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.dspace.AbstractUnitTest;
|
||||
import org.dspace.content.Bitstream;
|
||||
import org.dspace.curate.Utils;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.mockito.ArgumentMatchers;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.MockedStatic;
|
||||
import org.mockito.Mockito;
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* @author Vincenzo Mecca (vins01-4science - vincenzo.mecca at 4science.com)
|
||||
*
|
||||
*/
|
||||
public class S3BitStoreServiceTest extends AbstractUnitTest {
|
||||
|
||||
private S3BitStoreService s3BitStoreService;
|
||||
|
||||
@Mock
|
||||
private AmazonS3Client s3Service;
|
||||
|
||||
@Mock
|
||||
private TransferManager tm;
|
||||
|
||||
@Mock
|
||||
private Bitstream bitstream;
|
||||
|
||||
@Mock
|
||||
private Bitstream externalBitstream;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
this.s3BitStoreService = new S3BitStoreService(s3Service, tm);
|
||||
}
|
||||
|
||||
private Supplier<AmazonS3> mockedServiceSupplier() {
|
||||
return () -> this.s3Service;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBucketWhenInitThenUsesSameBucket() throws IOException {
|
||||
String bucketName = "Bucket0";
|
||||
s3BitStoreService.setBucketName(bucketName);
|
||||
when(this.s3Service.doesBucketExist(bucketName)).thenReturn(false);
|
||||
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
|
||||
verify(this.s3Service).doesBucketExist(bucketName);
|
||||
verify(this.s3Service, Mockito.times(1)).createBucket(bucketName);
|
||||
assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenEmptyBucketWhenInitThenUsesDefaultBucket() throws IOException {
|
||||
assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString());
|
||||
when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false);
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
|
||||
verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
|
||||
assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
|
||||
assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenAccessKeysWhenInitThenVerifiesCorrectBuilderCreation() throws IOException {
|
||||
assertThat(s3BitStoreService.getAwsAccessKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsSecretKey(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getBucketName(), isEmptyOrNullString());
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
when(this.s3Service.doesBucketExist(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX))).thenReturn(false);
|
||||
|
||||
final String awsAccessKey = "ACCESS_KEY";
|
||||
final String awsSecretKey = "SECRET_KEY";
|
||||
|
||||
this.s3BitStoreService.setAwsAccessKey(awsAccessKey);
|
||||
this.s3BitStoreService.setAwsSecretKey(awsSecretKey);
|
||||
|
||||
try (MockedStatic<S3BitStoreService> mockedS3BitStore = Mockito.mockStatic(S3BitStoreService.class)) {
|
||||
mockedS3BitStore
|
||||
.when(() ->
|
||||
S3BitStoreService.amazonClientBuilderBy(
|
||||
ArgumentMatchers.any(Regions.class),
|
||||
ArgumentMatchers.argThat(
|
||||
credentials ->
|
||||
awsAccessKey.equals(credentials.getAWSAccessKeyId()) &&
|
||||
awsSecretKey.equals(credentials.getAWSSecretKey())
|
||||
)
|
||||
)
|
||||
)
|
||||
.thenReturn(this.mockedServiceSupplier());
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
|
||||
mockedS3BitStore.verify(
|
||||
() ->
|
||||
S3BitStoreService.amazonClientBuilderBy(
|
||||
ArgumentMatchers.any(Regions.class),
|
||||
ArgumentMatchers.argThat(
|
||||
credentials ->
|
||||
awsAccessKey.equals(credentials.getAWSAccessKeyId()) &&
|
||||
awsSecretKey.equals(credentials.getAWSSecretKey())
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
verify(this.s3Service, Mockito.times(1)).createBucket(startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
|
||||
assertThat(s3BitStoreService.getBucketName(), Matchers.startsWith(S3BitStoreService.DEFAULT_BUCKET_PREFIX));
|
||||
assertThat(s3BitStoreService.getAwsAccessKey(), Matchers.equalTo(awsAccessKey));
|
||||
assertThat(s3BitStoreService.getAwsSecretKey(), Matchers.equalTo(awsSecretKey));
|
||||
assertThat(s3BitStoreService.getAwsRegionName(), isEmptyOrNullString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBucketBitStreamIdInputStreamWhenRetrievingFromS3ThenUsesBucketBitStreamId() throws IOException {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "BitStreamId";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
S3Object object = Mockito.mock(S3Object.class);
|
||||
S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class);
|
||||
when(object.getObjectContent()).thenReturn(inputStream);
|
||||
when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream));
|
||||
|
||||
verify(this.s3Service).getObject(
|
||||
ArgumentMatchers.argThat(
|
||||
request ->
|
||||
bucketName.contentEquals(request.getBucketName()) &&
|
||||
bitStreamId.contentEquals(request.getKey())
|
||||
)
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBucketBitStreamIdWhenNothingFoundOnS3ThenReturnsNull() throws IOException {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "BitStreamId";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(null);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
assertThat(this.s3BitStoreService.get(bitstream), Matchers.nullValue());
|
||||
|
||||
verify(this.s3Service).getObject(
|
||||
ArgumentMatchers.argThat(
|
||||
request ->
|
||||
bucketName.contentEquals(request.getBucketName()) &&
|
||||
bitStreamId.contentEquals(request.getKey())
|
||||
)
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenSubFolderWhenRequestsItemFromS3ThenTheIdentifierShouldHaveProperPath() throws IOException {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "012345";
|
||||
String subfolder = "/test/DSpace7/";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
this.s3BitStoreService.setSubfolder(subfolder);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
S3Object object = Mockito.mock(S3Object.class);
|
||||
S3ObjectInputStream inputStream = Mockito.mock(S3ObjectInputStream.class);
|
||||
when(object.getObjectContent()).thenReturn(inputStream);
|
||||
when(this.s3Service.getObject(ArgumentMatchers.any(GetObjectRequest.class))).thenReturn(object);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
assertThat(this.s3BitStoreService.get(bitstream), Matchers.equalTo(inputStream));
|
||||
|
||||
verify(this.s3Service).getObject(
|
||||
ArgumentMatchers.argThat(
|
||||
request ->
|
||||
bucketName.equals(request.getBucketName()) &&
|
||||
request.getKey().startsWith(subfolder) &&
|
||||
request.getKey().contains(bitStreamId) &&
|
||||
!request.getKey().contains(File.separator + File.separator)
|
||||
)
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void handleRegisteredIdentifierPrefixInS3() {
|
||||
String trueBitStreamId = "012345";
|
||||
String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + trueBitStreamId;
|
||||
// Should be detected as registered bitstream
|
||||
assertTrue(this.s3BitStoreService.isRegisteredBitstream(registeredBitstreamId));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void stripRegisteredBitstreamPrefixWhenCalculatingPath() {
|
||||
// Set paths and IDs
|
||||
String s3Path = "UNIQUE_S3_PATH/test/bitstream.pdf";
|
||||
String registeredBitstreamId = s3BitStoreService.REGISTERED_FLAG + s3Path;
|
||||
// Paths should be equal, since the getRelativePath method should strip the registered -R prefix
|
||||
String relativeRegisteredPath = this.s3BitStoreService.getRelativePath(registeredBitstreamId);
|
||||
assertEquals(s3Path, relativeRegisteredPath);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierLongerThanPossibleWhenIntermediatePathIsComputedThenIsSplittedAndTruncated() {
|
||||
String path = "01234567890123456789";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierShorterThanAFolderLengthWhenIntermediatePathIsComputedThenIsSingleFolder() {
|
||||
String path = "0";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "0" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenPartialBitStreamIdentifierWhenIntermediatePathIsComputedThenIsCompletlySplitted() {
|
||||
String path = "01234";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "4" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenMaxLengthBitStreamIdentifierWhenIntermediatePathIsComputedThenIsSplittedAllAsSubfolder() {
|
||||
String path = "012345";
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path);
|
||||
String expectedPath = "01" + File.separator + "23" + File.separator + "45" + File.separator;
|
||||
assertThat(computedPath, equalTo(expectedPath));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierWhenIntermediatePathIsComputedThenNotEndingDoubleSlash() throws IOException {
|
||||
StringBuilder path = new StringBuilder("01");
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
int slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("2");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("3");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("4");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
|
||||
path.append("56789");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator + File.separator)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentidierWhenIntermediatePathIsComputedThenMustBeSplitted() throws IOException {
|
||||
StringBuilder path = new StringBuilder("01");
|
||||
String computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
int slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("2");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("3");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("4");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
|
||||
path.append("56789");
|
||||
computedPath = this.s3BitStoreService.getIntermediatePath(path.toString());
|
||||
slashes = computeSlashes(path.toString());
|
||||
assertThat(computedPath, Matchers.endsWith(File.separator));
|
||||
assertThat(computedPath.split(File.separator).length, Matchers.equalTo(slashes));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamIdentifierWithSlashesWhenSanitizedThenSlashesMustBeRemoved() {
|
||||
String sInternalId = new StringBuilder("01")
|
||||
.append(File.separator)
|
||||
.append("22")
|
||||
.append(File.separator)
|
||||
.append("33")
|
||||
.append(File.separator)
|
||||
.append("4455")
|
||||
.toString();
|
||||
String computedPath = this.s3BitStoreService.sanitizeIdentifier(sInternalId);
|
||||
assertThat(computedPath, Matchers.not(Matchers.startsWith(File.separator)));
|
||||
assertThat(computedPath, Matchers.not(Matchers.endsWith(File.separator)));
|
||||
assertThat(computedPath, Matchers.not(Matchers.containsString(File.separator)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamWhenRemoveThenCallS3DeleteMethod() throws Exception {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "BitStreamId";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
this.s3BitStoreService.remove(bitstream);
|
||||
|
||||
verify(this.s3Service, Mockito.times(1)).deleteObject(ArgumentMatchers.eq(bucketName),
|
||||
ArgumentMatchers.eq(bitStreamId));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamWhenPutThenCallS3PutMethodAndStoresInBitStream() throws Exception {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "BitStreamId";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
File file = Mockito.mock(File.class);
|
||||
InputStream in = Mockito.mock(InputStream.class);
|
||||
PutObjectResult putObjectResult = Mockito.mock(PutObjectResult.class);
|
||||
Upload upload = Mockito.mock(Upload.class);
|
||||
UploadResult uploadResult = Mockito.mock(UploadResult.class);
|
||||
when(upload.waitForUploadResult()).thenReturn(uploadResult);
|
||||
String mockedTag = "1a7771d5fdd7bfdfc84033c70b1ba555";
|
||||
when(file.length()).thenReturn(8L);
|
||||
try (MockedStatic<File> fileMock = Mockito.mockStatic(File.class)) {
|
||||
try (MockedStatic<FileUtils> fileUtilsMock = Mockito.mockStatic(FileUtils.class)) {
|
||||
try (MockedStatic<Utils> curateUtils = Mockito.mockStatic(Utils.class)) {
|
||||
curateUtils.when(() -> Utils.checksum((File) ArgumentMatchers.any(), ArgumentMatchers.any()))
|
||||
.thenReturn(mockedTag);
|
||||
|
||||
fileMock
|
||||
.when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
|
||||
.thenReturn(file);
|
||||
|
||||
when(this.tm.upload(ArgumentMatchers.any(), ArgumentMatchers.any(), ArgumentMatchers.any()))
|
||||
.thenReturn(upload);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
this.s3BitStoreService.put(bitstream, in);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
verify(this.bitstream, Mockito.times(1)).setSizeBytes(
|
||||
ArgumentMatchers.eq(8L)
|
||||
);
|
||||
|
||||
verify(this.bitstream, Mockito.times(1)).setChecksum(
|
||||
ArgumentMatchers.eq(mockedTag)
|
||||
);
|
||||
|
||||
verify(this.tm, Mockito.times(1)).upload(
|
||||
ArgumentMatchers.eq(bucketName),
|
||||
ArgumentMatchers.eq(bitStreamId),
|
||||
ArgumentMatchers.eq(file)
|
||||
);
|
||||
|
||||
verify(file, Mockito.times(1)).delete();
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void givenBitStreamWhenCallingPutFileCopyingThrowsIOExceptionPutThenFileIsRemovedAndStreamClosed()
|
||||
throws Exception {
|
||||
String bucketName = "BucketTest";
|
||||
String bitStreamId = "BitStreamId";
|
||||
this.s3BitStoreService.setBucketName(bucketName);
|
||||
this.s3BitStoreService.setUseRelativePath(false);
|
||||
when(bitstream.getInternalId()).thenReturn(bitStreamId);
|
||||
|
||||
File file = Mockito.mock(File.class);
|
||||
InputStream in = Mockito.mock(InputStream.class);
|
||||
try (MockedStatic<File> fileMock = Mockito.mockStatic(File.class)) {
|
||||
try (MockedStatic<FileUtils> fileUtilsMock = Mockito.mockStatic(FileUtils.class)) {
|
||||
fileUtilsMock
|
||||
.when(() -> FileUtils.copyInputStreamToFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
|
||||
.thenThrow(IOException.class);
|
||||
fileMock
|
||||
.when(() -> File.createTempFile(ArgumentMatchers.any(), ArgumentMatchers.any()))
|
||||
.thenReturn(file);
|
||||
|
||||
this.s3BitStoreService.init();
|
||||
assertThrows(IOException.class, () -> this.s3BitStoreService.put(bitstream, in));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
verify(this.bitstream, Mockito.never()).setSizeBytes(ArgumentMatchers.any(Long.class));
|
||||
|
||||
verify(this.bitstream, Mockito.never()).setChecksum(ArgumentMatchers.any(String.class));
|
||||
|
||||
verify(this.s3Service, Mockito.never()).putObject(ArgumentMatchers.any(PutObjectRequest.class));
|
||||
|
||||
verify(file, Mockito.times(1)).delete();
|
||||
|
||||
}
|
||||
|
||||
private int computeSlashes(String internalId) {
|
||||
int minimum = internalId.length();
|
||||
int slashesPerLevel = minimum / S3BitStoreService.digitsPerLevel;
|
||||
int odd = Math.min(1, minimum % S3BitStoreService.digitsPerLevel);
|
||||
int slashes = slashesPerLevel + odd;
|
||||
return Math.min(slashes, S3BitStoreService.directoryLevels);
|
||||
}
|
||||
|
||||
}
|
@@ -118,7 +118,8 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
}

/**
* Constructs a solr search URL.
* Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2.
* https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2
*
* @param query the search terms
* @param manifestId the id of the manifest in which to search
@@ -132,8 +133,9 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
solrQuery.set("hl.ocr.fl", "ocr_text");
solrQuery.set("hl.ocr.contextBlock", "line");
solrQuery.set("hl.ocr.contextSize", "2");
solrQuery.set("hl.snippets", "10");
solrQuery.set("hl.ocr.trackPages", "off");
solrQuery.set("hl.snippets", "8192");
solrQuery.set("hl.ocr.maxPassages", "8192");
solrQuery.set("hl.ocr.trackPages", "on");
solrQuery.set("hl.ocr.limitBlock","page");
solrQuery.set("hl.ocr.absoluteHighlights", "true");

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xoai.app.plugins;

import java.sql.SQLException;
import java.util.List;

import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin;
import org.dspace.xoai.util.ItemUtils;

/**
* AccessStatusElementItemCompilePlugin aims to add structured information about the
* Access Status of the item (if any).
*
* The xoai document will be enriched with a structure like this:
* <pre>
* {@code
* <element name="others">
* <element name="access-status">
* <field name="value">open.access</field>
* </element>
* </element>
* }
* </pre>
* Returned values are based on:
* @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper
*/
public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin {

@Override
public Metadata additionalMetadata(Context context, Metadata metadata, Item item) {
AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService();

try {
String accessStatusType;
accessStatusType = accessStatusService.getAccessStatus(context, item);

Element accessStatus = ItemUtils.create("access-status");
accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType));

Element others;
List<Element> elements = metadata.getElement();
if (ItemUtils.getElement(elements, "others") != null) {
others = ItemUtils.getElement(elements, "others");
} else {
others = ItemUtils.create("others");
}
others.getElement().add(accessStatus);

} catch (SQLException e) {
e.printStackTrace();
}

return metadata;
}

}

@@ -12,18 +12,23 @@ import java.util.List;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.dspace.app.rest.converter.ConverterService;
|
||||
import org.dspace.app.rest.exception.DSpaceBadRequestException;
|
||||
import org.dspace.app.rest.model.ProcessRest;
|
||||
import org.dspace.app.rest.model.ScriptRest;
|
||||
import org.dspace.app.rest.model.hateoas.ProcessResource;
|
||||
import org.dspace.app.rest.repository.ScriptRestRepository;
|
||||
import org.dspace.app.rest.utils.ContextUtil;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.dspace.scripts.service.ScriptService;
|
||||
import org.dspace.services.RequestService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.rest.webmvc.ControllerUtils;
|
||||
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
|
||||
import org.springframework.hateoas.RepresentationModel;
|
||||
import org.springframework.http.HttpHeaders;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.web.bind.annotation.PathVariable;
|
||||
@@ -48,6 +53,9 @@ public class ScriptProcessesController {
|
||||
@Autowired
|
||||
private ScriptRestRepository scriptRestRepository;
|
||||
|
||||
@Autowired
|
||||
private ScriptService scriptService;
|
||||
|
||||
@Autowired
|
||||
private RequestService requestService;
|
||||
|
||||
@@ -59,8 +67,8 @@ public class ScriptProcessesController {
|
||||
* @return The ProcessResource object for the created process
|
||||
* @throws Exception If something goes wrong
|
||||
*/
|
||||
@RequestMapping(method = RequestMethod.POST)
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
@RequestMapping(method = RequestMethod.POST, consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
|
||||
@PreAuthorize("hasAuthority('AUTHENTICATED')")
|
||||
public ResponseEntity<RepresentationModel<?>> startProcess(
|
||||
@PathVariable(name = "name") String scriptName,
|
||||
@RequestParam(name = "file", required = false) List<MultipartFile> files)
|
||||
@@ -75,4 +83,21 @@ public class ScriptProcessesController {
|
||||
return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource);
|
||||
}
|
||||
|
||||
@RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE)
|
||||
@PreAuthorize("hasAuthority('AUTHENTICATED')")
|
||||
public ResponseEntity<RepresentationModel<?>> startProcessInvalidMimeType(
|
||||
@PathVariable(name = "name") String scriptName)
|
||||
throws Exception {
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Starting Process for Script with name: " + scriptName);
|
||||
}
|
||||
Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest());
|
||||
ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName);
|
||||
|
||||
if (scriptToExecute == null) {
|
||||
throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found");
|
||||
}
|
||||
throw new DSpaceBadRequestException("Invalid mimetype");
|
||||
}
|
||||
|
||||
}
|
||||
|
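With this split, only multipart/form-data POSTs reach startProcess, the real authorization is delegated to the script's configuration, and any other content type falls through to startProcessInvalidMimeType and a 400. A minimal usage sketch in the style of the integration tests later in this change set; getClient, getAuthToken and eperson come from AbstractControllerIntegrationTest, and the script name and handle are illustrative only:

    String token = getAuthToken(eperson.getEmail(), password);

    // multipart/form-data with a JSON-encoded "properties" parameter -> 202 Accepted
    getClient(token).perform(multipart("/api/system/scripts/curate/processes")
            .param("properties",
                   "[{\"name\":\"-i\",\"value\":\"123456789/1\"},{\"name\":\"-t\",\"value\":\"noop\"}]"))
            .andExpect(status().isAccepted());

    // any other content type hits startProcessInvalidMimeType -> 400 Bad Request
    getClient(token).perform(post("/api/system/scripts/curate/processes"))
            .andExpect(status().isBadRequest());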
@@ -47,7 +47,7 @@ public class ProcessFileTypesLinkRepository extends AbstractDSpaceRestRepository
|
||||
* @throws SQLException If something goes wrong
|
||||
* @throws AuthorizeException If something goes wrong
|
||||
*/
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
@PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
|
||||
public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request,
|
||||
Integer processId,
|
||||
@Nullable Pageable optionalPageable,
|
||||
|
@@ -47,7 +47,7 @@ public class ProcessFilesLinkRepository extends AbstractDSpaceRestRepository imp
|
||||
* @throws SQLException If something goes wrong
|
||||
* @throws AuthorizeException If something goes wrong
|
||||
*/
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
@PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
|
||||
public Page<BitstreamRest> getFilesFromProcess(@Nullable HttpServletRequest request,
|
||||
Integer processId,
|
||||
@Nullable Pageable optionalPageable,
|
||||
|
@@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im
|
||||
* @throws SQLException If something goes wrong
|
||||
* @throws AuthorizeException If something goes wrong
|
||||
*/
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
@PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')")
|
||||
public BitstreamRest getOutputFromProcess(@Nullable HttpServletRequest request,
|
||||
Integer processId,
|
||||
@Nullable Pageable optionalPageable,
|
||||
|
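The three link repositories above now rely on a PROCESS/READ permission check instead of a blanket ADMIN authority. A sketch of the rule the integration tests below expect that check to enforce; the method is illustrative, not the actual DSpace permission evaluator:

    // Illustrative only: what hasPermission(#processId, 'PROCESS', 'READ') should boil down to.
    boolean canReadProcess(Context context, Process process) throws SQLException {
        EPerson currentUser = context.getCurrentUser();
        if (currentUser == null) {
            return false;                          // anonymous users get nothing
        }
        if (authorizeService.isAdmin(context)) {
            return true;                           // site admins keep full access
        }
        // the user that triggered the process may read its files and output
        return currentUser.equals(process.getEPerson());
    }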
@@ -94,6 +94,22 @@ public class ProcessRestRepository extends DSpaceRestRepository<ProcessRest, Int
|
||||
}
|
||||
}
|
||||
|
||||
@SearchRestMethod(name = "own")
|
||||
@PreAuthorize("hasAuthority('AUTHENTICATED')")
|
||||
public Page<ProcessRest> findByCurrentUser(Pageable pageable) {
|
||||
|
||||
try {
|
||||
Context context = obtainContext();
|
||||
long total = processService.countByUser(context, context.getCurrentUser());
|
||||
List<Process> processes = processService.findByUser(context, context.getCurrentUser(),
|
||||
pageable.getPageSize(),
|
||||
Math.toIntExact(pageable.getOffset()));
|
||||
return converter.toRestPage(processes, pageable, total, utils.obtainProjection());
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls on the getBitstreams method to retrieve all the Bitstreams of this process
|
||||
* @param processId The processId of the Process to retrieve the Bitstreams for
|
||||
|
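The @SearchRestMethod added above is exposed, as the new testFindByCurrentUser below confirms, at /api/system/processes/search/own and returns only processes started by the authenticated user. A one-line usage sketch in the same integration-test style used throughout this change:

    getClient(getAuthToken(eperson.getEmail(), password))
            .perform(get("/api/system/processes/search/own").param("size", "20"))
            .andExpect(status().isOk());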
@@ -37,6 +37,7 @@ import org.dspace.scripts.service.ScriptService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
|
||||
import org.springframework.security.access.AccessDeniedException;
|
||||
import org.springframework.security.access.prepost.PreAuthorize;
|
||||
import org.springframework.stereotype.Component;
|
||||
@@ -56,29 +57,24 @@ public class ScriptRestRepository extends DSpaceRestRepository<ScriptRest, Strin
|
||||
@Autowired
|
||||
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
|
||||
|
||||
// TODO: findOne() currently requires site ADMIN permissions as all scripts are admin-only at this time.
|
||||
// If scripts ever need to be accessible to Comm/Coll Admins, we would likely need to create a new GrantedAuthority
|
||||
// for Comm/Coll Admins in EPersonRestAuthenticationProvider to use on this endpoint
|
||||
@Override
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
// authorization is verified inside the method
|
||||
@PreAuthorize("hasAuthority('AUTHENTICATED')")
|
||||
public ScriptRest findOne(Context context, String name) {
|
||||
|
||||
ScriptConfiguration scriptConfiguration = scriptService.getScriptConfiguration(name);
|
||||
if (scriptConfiguration != null) {
|
||||
if (scriptConfiguration.isAllowedToExecute(context)) {
|
||||
if (scriptConfiguration.isAllowedToExecute(context, null)) {
|
||||
return converter.toRest(scriptConfiguration, utils.obtainProjection());
|
||||
} else {
|
||||
throw new AccessDeniedException("The current user was not authorized to access this script");
|
||||
}
|
||||
}
|
||||
throw new DSpaceBadRequestException("The script with name: " + name + " could not be found");
|
||||
return null;
|
||||
}
|
||||
|
||||
// TODO: findAll() currently requires site ADMIN permissions as all scripts are admin-only at this time.
|
||||
// If scripts ever need to be accessible to Comm/Coll Admins, we would likely need to create a new GrantedAuthority
|
||||
// for Comm/Coll Admins in EPersonRestAuthenticationProvider to use on this endpoint
|
||||
@Override
|
||||
@PreAuthorize("hasAuthority('ADMIN')")
|
||||
// authorization check is performed inside the script service
|
||||
@PreAuthorize("hasAuthority('AUTHENTICATED')")
|
||||
public Page<ScriptRest> findAll(Context context, Pageable pageable) {
|
||||
List<ScriptConfiguration> scriptConfigurations =
|
||||
scriptService.getScriptConfigurations(context);
|
||||
@@ -104,11 +100,17 @@ public class ScriptRestRepository extends DSpaceRestRepository<ScriptRest, Strin
|
||||
List<DSpaceCommandLineParameter> dSpaceCommandLineParameters =
|
||||
processPropertiesToDSpaceCommandLineParameters(properties);
|
||||
ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName);
|
||||
|
||||
if (scriptToExecute == null) {
|
||||
throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found");
|
||||
throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found");
|
||||
}
|
||||
if (!scriptToExecute.isAllowedToExecute(context)) {
|
||||
throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName);
|
||||
try {
|
||||
if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) {
|
||||
throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName
|
||||
+ " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", "));
|
||||
}
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new DSpaceBadRequestException("missed handle");
|
||||
}
|
||||
RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler(
|
||||
context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters,
|
||||
|
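The repository now hands the parsed command line to ScriptConfiguration#isAllowedToExecute(Context, List) (and null when a script is merely listed), so each configuration decides who may run it and against what. A minimal sketch of an override that scopes execution to administrators of the object named by -i, in the spirit of the curate behaviour exercised by CurationScriptIT below; the class name and the injected HandleService are assumptions, and the other required ScriptConfiguration members are omitted for brevity:

    public class HandleScopedScriptConfiguration<T extends DSpaceRunnable> extends ScriptConfiguration<T> {

        @Autowired
        private AuthorizeService authorizeService;

        @Autowired
        private HandleService handleService;   // assumed to be injectable like the other services

        @Override
        public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
            try {
                if (commandLineParameters == null) {
                    // listing the script (findOne/findAll): fall back to the site-admin check
                    return authorizeService.isAdmin(context);
                }
                String handle = commandLineParameters.stream()
                        .filter(parameter -> "-i".equals(parameter.getName()))
                        .map(DSpaceCommandLineParameter::getValue)
                        .findFirst()
                        // surfaces as a 400 through the "missed handle" branch shown above
                        .orElseThrow(() -> new IllegalArgumentException("Missing mandatory -i <handle>"));
                DSpaceObject dso = handleService.resolveToObject(context, handle);
                return dso != null && authorizeService.isAdmin(context, dso);
            } catch (SQLException e) {
                throw new RuntimeException(e);
            }
        }
    }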
@@ -7,6 +7,8 @@
|
||||
*/
|
||||
package org.dspace.app.rest;
|
||||
|
||||
import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess;
|
||||
import static org.dspace.content.ProcessStatus.SCHEDULED;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
@@ -220,22 +222,35 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
|
||||
@Test
|
||||
public void getProcessFiles() throws Exception {
|
||||
context.setCurrentUser(eperson);
|
||||
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
|
||||
|
||||
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
|
||||
processService.appendFile(context, process, is, "inputfile", "test.csv");
|
||||
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
|
||||
}
|
||||
Bitstream bitstream = processService.getBitstream(context, process, "inputfile");
|
||||
Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile");
|
||||
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files"))
|
||||
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.files[0].name", is("test.csv")))
|
||||
.andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" +
|
||||
"[0].value", is("inputfile")));
|
||||
|
||||
getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content"))
|
||||
.andExpect(status().isOk());
|
||||
// also the user that triggered the process should be able to access the process' files
|
||||
String epersonToken = getAuthToken(eperson.getEmail(), password);
|
||||
getClient(epersonToken)
|
||||
.perform(get("/api/system/processes/" + newProcess.getID() + "/files"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.files[0].name", is("test.csv")))
|
||||
.andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" +
|
||||
"[0].value", is("inputfile")));
|
||||
getClient(epersonToken)
|
||||
.perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content"))
|
||||
.andExpect(status().isOk());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -243,25 +258,34 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
|
||||
|
||||
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
|
||||
processService.appendFile(context, process, is, "inputfile", "test.csv");
|
||||
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
|
||||
}
|
||||
Bitstream bitstream = processService.getBitstream(context, process, "inputfile");
|
||||
Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile");
|
||||
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile"))
|
||||
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv")))
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" +
|
||||
"[0].value", is("inputfile")));
|
||||
// also the user that triggered the process should be able to access the process' files
|
||||
String epersonToken = getAuthToken(eperson.getEmail(), password);
|
||||
getClient(epersonToken)
|
||||
.perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv")))
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString())))
|
||||
.andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" +
|
||||
"[0].value", is("inputfile")));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getProcessFilesTypes() throws Exception {
|
||||
Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build();
|
||||
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
|
||||
processService.appendFile(context, process, is, "inputfile", "test.csv");
|
||||
processService.appendFile(context, newProcess, is, "inputfile", "test.csv");
|
||||
}
|
||||
|
||||
List<String> fileTypesToCheck = new LinkedList<>();
|
||||
@@ -269,12 +293,18 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes"))
|
||||
getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$", ProcessFileTypesMatcher
|
||||
.matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck)));
|
||||
|
||||
.matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck)));
|
||||
|
||||
// also the user that triggered the process should be able to access the process' files
|
||||
String epersonToken = getAuthToken(eperson.getEmail(), password);
|
||||
getClient(epersonToken)
|
||||
.perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$", ProcessFileTypesMatcher
|
||||
.matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck)));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -783,27 +813,68 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
.andExpect(status().isBadRequest());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFindByCurrentUser() throws Exception {
|
||||
|
||||
Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
|
||||
.withStartAndEndTime("10/01/1990", "20/01/1990")
|
||||
.build();
|
||||
ProcessBuilder.createProcess(context, admin, "mock-script", parameters)
|
||||
.withStartAndEndTime("11/01/1990", "19/01/1990")
|
||||
.build();
|
||||
Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
|
||||
.withStartAndEndTime("12/01/1990", "18/01/1990")
|
||||
.build();
|
||||
|
||||
String token = getAuthToken(eperson.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/processes/search/own"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.processes", contains(
|
||||
matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED),
|
||||
matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED))))
|
||||
.andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2))));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void getProcessOutput() throws Exception {
|
||||
context.setCurrentUser(eperson);
|
||||
Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters)
|
||||
.withStartAndEndTime("10/01/1990", "20/01/1990")
|
||||
.build();
|
||||
|
||||
try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) {
|
||||
processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO);
|
||||
processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO);
|
||||
}
|
||||
processService.createLogBitstream(context, process);
|
||||
processService.createLogBitstream(context, process1);
|
||||
List<String> fileTypesToCheck = new LinkedList<>();
|
||||
fileTypesToCheck.add("inputfile");
|
||||
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/processes/" + process.getID() + "/output"))
|
||||
getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.name",
|
||||
is(process.getName() + process.getID() + ".log")))
|
||||
is(process1.getName() + process1.getID() + ".log")))
|
||||
.andExpect(jsonPath("$.type", is("bitstream")))
|
||||
.andExpect(jsonPath("$.metadata['dc.title'][0].value",
|
||||
is(process.getName() + process.getID() + ".log")))
|
||||
is(process1.getName() + process1.getID() + ".log")))
|
||||
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value",
|
||||
is("script_output")));
|
||||
|
||||
String epersonToken = getAuthToken(eperson.getEmail(), password);
|
||||
|
||||
getClient(epersonToken)
|
||||
.perform(get("/api/system/processes/" + process1.getID() + "/output"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.name",
|
||||
is(process1.getName() + process1.getID() + ".log")))
|
||||
.andExpect(jsonPath("$.type", is("bitstream")))
|
||||
.andExpect(jsonPath("$.metadata['dc.title'][0].value",
|
||||
is(process1.getName() + process1.getID() + ".log")))
|
||||
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value",
|
||||
is("script_output")));
|
||||
|
||||
}
|
||||
}
|
||||
|
@@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
|
||||
import static org.hamcrest.Matchers.allOf;
|
||||
import static org.hamcrest.Matchers.contains;
|
||||
import static org.hamcrest.Matchers.containsInAnyOrder;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.junit.Assert.assertThat;
|
||||
@@ -44,6 +45,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
|
||||
import org.dspace.authorize.AuthorizeException;
|
||||
import org.dspace.builder.CollectionBuilder;
|
||||
import org.dspace.builder.CommunityBuilder;
|
||||
import org.dspace.builder.EPersonBuilder;
|
||||
import org.dspace.builder.GroupBuilder;
|
||||
import org.dspace.builder.ItemBuilder;
|
||||
import org.dspace.builder.ProcessBuilder;
|
||||
@@ -53,6 +55,7 @@ import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.ProcessStatus;
|
||||
import org.dspace.content.service.BitstreamService;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.eperson.Group;
|
||||
import org.dspace.scripts.DSpaceCommandLineParameter;
|
||||
import org.dspace.scripts.Process;
|
||||
@@ -123,12 +126,72 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
|
||||
|
||||
@Test
|
||||
public void findAllScriptsUnauthorizedTest() throws Exception {
|
||||
public void findAllScriptsGenericLoggedInUserTest() throws Exception {
|
||||
String token = getAuthToken(eperson.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/scripts"))
|
||||
.andExpect(status().isForbidden());
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.page.totalElements", is(0)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findAllScriptsAnonymousUserTest() throws Exception {
|
||||
// this should be changed once we allow anonymous users to execute some scripts
|
||||
getClient().perform(get("/api/system/scripts"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findAllScriptsLocalAdminsTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson comAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("comAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson colAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("colAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson itemAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("itemAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
Community community = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community")
|
||||
.withAdminGroup(comAdmin)
|
||||
.build();
|
||||
Collection collection = CollectionBuilder.createCollection(context, community)
|
||||
.withName("Collection")
|
||||
.withAdminGroup(colAdmin)
|
||||
.build();
|
||||
ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
|
||||
.withTitle("Test item to curate").build();
|
||||
context.restoreAuthSystemState();
|
||||
ScriptConfiguration curateScriptConfiguration =
|
||||
scriptConfigurations.stream().filter(scriptConfiguration
|
||||
-> scriptConfiguration.getName().equals("curate"))
|
||||
.findAny().get();
|
||||
|
||||
// the local admins have at least access to the curate script
|
||||
// and no access to the process-cleaner script
|
||||
String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
|
||||
getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
|
||||
ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription()))))
|
||||
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
|
||||
String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
|
||||
getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
|
||||
ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription()))))
|
||||
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
|
||||
String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
|
||||
getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100"))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem(
|
||||
ScriptMatcher.matchScript(curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription()))))
|
||||
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -222,6 +285,63 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findOneScriptByNameLocalAdminsTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson comAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("comAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson colAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("colAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson itemAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("itemAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
Community community = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community")
|
||||
.withAdminGroup(comAdmin)
|
||||
.build();
|
||||
Collection collection = CollectionBuilder.createCollection(context, community)
|
||||
.withName("Collection")
|
||||
.withAdminGroup(colAdmin)
|
||||
.build();
|
||||
ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
|
||||
.withTitle("Test item to curate").build();
|
||||
context.restoreAuthSystemState();
|
||||
ScriptConfiguration curateScriptConfiguration =
|
||||
scriptConfigurations.stream().filter(scriptConfiguration
|
||||
-> scriptConfiguration.getName().equals("curate"))
|
||||
.findAny().get();
|
||||
|
||||
String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
|
||||
String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
|
||||
String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
|
||||
getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$", ScriptMatcher
|
||||
.matchScript(
|
||||
curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription())));
|
||||
getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$", ScriptMatcher
|
||||
.matchScript(
|
||||
curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription())));
|
||||
getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName()))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$", ScriptMatcher
|
||||
.matchScript(
|
||||
curateScriptConfiguration.getName(),
|
||||
curateScriptConfiguration.getDescription())));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findOneScriptByNameNotAuthenticatedTest() throws Exception {
|
||||
getClient().perform(get("/api/system/scripts/mock-script"))
|
||||
.andExpect(status().isUnauthorized());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void findOneScriptByNameTestAccessDenied() throws Exception {
|
||||
String token = getAuthToken(eperson.getEmail(), password);
|
||||
@@ -235,15 +355,51 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(get("/api/system/scripts/mock-script-invalid"))
|
||||
.andExpect(status().isBadRequest());
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
/**
|
||||
* This test will create a basic structure of communities, collections and items with some local admins at each
|
||||
 * level and verify that neither the local admins nor generic users can run scripts reserved to administrators
|
||||
 * (i.e. the default ones that don't override the default
|
||||
 * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation).
|
||||
*/
|
||||
@Test
|
||||
public void postProcessNonAdminAuthorizeException() throws Exception {
|
||||
String token = getAuthToken(eperson.getEmail(), password);
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson comAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("comAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson colAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("colAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson itemAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("itemAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
Community community = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community")
|
||||
.withAdminGroup(comAdmin)
|
||||
.build();
|
||||
Collection collection = CollectionBuilder.createCollection(context, community)
|
||||
.withName("Collection")
|
||||
.withAdminGroup(colAdmin)
|
||||
.build();
|
||||
Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
|
||||
.withTitle("Test item to curate").build();
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
String token = getAuthToken(eperson.getEmail(), password);
|
||||
String comAdmin_token = getAuthToken(eperson.getEmail(), password);
|
||||
String colAdmin_token = getAuthToken(eperson.getEmail(), password);
|
||||
String itemAdmin_token = getAuthToken(eperson.getEmail(), password);
|
||||
getClient(token).perform(multipart("/api/system/scripts/mock-script/processes"))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes"))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes"))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes"))
|
||||
.andExpect(status().isForbidden());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -277,16 +433,6 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
@Test
|
||||
public void postProcessAdminNoOptionsFailedStatus() throws Exception {
|
||||
|
||||
// List<ParameterValueRest> list = new LinkedList<>();
|
||||
//
|
||||
// ParameterValueRest parameterValueRest = new ParameterValueRest();
|
||||
// parameterValueRest.setName("-z");
|
||||
// parameterValueRest.setValue("test");
|
||||
// ParameterValueRest parameterValueRest1 = new ParameterValueRest();
|
||||
// parameterValueRest1.setName("-q");
|
||||
// list.add(parameterValueRest);
|
||||
// list.add(parameterValueRest1);
|
||||
|
||||
LinkedList<DSpaceCommandLineParameter> parameters = new LinkedList<>();
|
||||
|
||||
parameters.add(new DSpaceCommandLineParameter("-z", "test"));
|
||||
@@ -322,7 +468,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes"))
|
||||
.andExpect(status().isBadRequest());
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -434,12 +580,19 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception {
|
||||
|
||||
String token = getAuthToken(admin.getEmail(), password);
|
||||
|
||||
getClient(token)
|
||||
.perform(post("/api/system/scripts/mock-script/processes"))
|
||||
.andExpect(status().isBadRequest());
|
||||
|
||||
getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes"))
|
||||
.andExpect(status().isBadRequest());
|
||||
.andExpect(status().isNotFound());
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -601,9 +754,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@After
|
||||
public void destroy() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> {
|
||||
try {
|
||||
processService.delete(context, process);
|
||||
@@ -611,6 +764,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
});
|
||||
context.restoreAuthSystemState();
|
||||
super.destroy();
|
||||
}
|
||||
|
||||
|
@@ -11,7 +11,6 @@ import java.io.InputStream;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.app.rest.converter.ScriptConverter;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
|
||||
/**
|
||||
@@ -28,10 +27,6 @@ public class TypeConversionTestScriptConfiguration<T extends TypeConversionTestS
|
||||
|
||||
}
|
||||
|
||||
public boolean isAllowedToExecute(final Context context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
public Options getOptions() {
|
||||
|
||||
Options options = new Options();
|
||||
|
@@ -14,6 +14,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
|
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.Arrays;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
@@ -29,13 +30,19 @@ import org.dspace.app.rest.projection.Projection;
|
||||
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
|
||||
import org.dspace.builder.CollectionBuilder;
|
||||
import org.dspace.builder.CommunityBuilder;
|
||||
import org.dspace.builder.EPersonBuilder;
|
||||
import org.dspace.builder.ItemBuilder;
|
||||
import org.dspace.builder.ProcessBuilder;
|
||||
import org.dspace.content.Collection;
|
||||
import org.dspace.content.Community;
|
||||
import org.dspace.content.Item;
|
||||
import org.dspace.content.ProcessStatus;
|
||||
import org.dspace.content.Site;
|
||||
import org.dspace.content.factory.ContentServiceFactory;
|
||||
import org.dspace.eperson.EPerson;
|
||||
import org.dspace.scripts.DSpaceCommandLineParameter;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.dspace.scripts.service.ScriptService;
|
||||
import org.junit.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
@@ -49,6 +56,9 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
|
||||
@Autowired
|
||||
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
|
||||
|
||||
@Autowired
|
||||
private ScriptService scriptService;
|
||||
|
||||
private final static String SCRIPTS_ENDPOINT = "/api/" + ScriptRest.CATEGORY + "/" + ScriptRest.PLURAL_NAME;
|
||||
private final static String CURATE_SCRIPT_ENDPOINT = SCRIPTS_ENDPOINT + "/curate/" + ProcessRest.PLURAL_NAME;
|
||||
|
||||
@@ -371,6 +381,263 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This test will create a basic structure of communities, collections and items with some local admins at each
|
||||
* level and verify that the local admins can only run the curate script on their own objects
|
||||
*/
|
||||
@Test
|
||||
public void securityCurateTest() throws Exception {
|
||||
context.turnOffAuthorisationSystem();
|
||||
EPerson comAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("comAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson colAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("colAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
EPerson itemAdmin = EPersonBuilder.createEPerson(context)
|
||||
.withEmail("itemAdmin@example.com")
|
||||
.withPassword(password).build();
|
||||
Community community = CommunityBuilder.createCommunity(context)
|
||||
.withName("Community")
|
||||
.withAdminGroup(comAdmin)
|
||||
.build();
|
||||
Community anotherCommunity = CommunityBuilder.createCommunity(context)
|
||||
.withName("Another Community")
|
||||
.build();
|
||||
Collection collection = CollectionBuilder.createCollection(context, community)
|
||||
.withName("Collection")
|
||||
.withAdminGroup(colAdmin)
|
||||
.build();
|
||||
Collection anotherCollection = CollectionBuilder.createCollection(context, anotherCommunity)
|
||||
.withName("AnotherCollection")
|
||||
.build();
|
||||
Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin)
|
||||
.withTitle("Test item to curate").build();
|
||||
Item anotherItem = ItemBuilder.createItem(context, anotherCollection)
|
||||
.withTitle("Another Test item to curate").build();
|
||||
Site site = ContentServiceFactory.getInstance().getSiteService().findSite(context);
|
||||
context.restoreAuthSystemState();
|
||||
|
||||
LinkedList<DSpaceCommandLineParameter> siteParameters = new LinkedList<>();
|
||||
siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle()));
|
||||
siteParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> comParameters = new LinkedList<>();
|
||||
comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle()));
|
||||
comParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> anotherComParameters = new LinkedList<>();
|
||||
anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle()));
|
||||
anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> colParameters = new LinkedList<>();
|
||||
colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle()));
|
||||
colParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> anotherColParameters = new LinkedList<>();
|
||||
anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle()));
|
||||
anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> itemParameters = new LinkedList<>();
|
||||
itemParameters.add(new DSpaceCommandLineParameter("-i", item.getHandle()));
|
||||
itemParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
LinkedList<DSpaceCommandLineParameter> anotherItemParameters = new LinkedList<>();
|
||||
anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle()));
|
||||
anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop"));
|
||||
|
||||
String comAdminToken = getAuthToken(comAdmin.getEmail(), password);
|
||||
String colAdminToken = getAuthToken(colAdmin.getEmail(), password);
|
||||
String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password);
|
||||
|
||||
List<ParameterValueRest> listCurateSite = siteParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listCom = comParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listAnotherCom = anotherComParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listCol = colParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listAnotherCol = anotherColParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listItem = itemParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
List<ParameterValueRest> listAnotherItem = anotherItemParameters.stream()
|
||||
.map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter
|
||||
.convert(dSpaceCommandLineParameter, Projection.DEFAULT))
|
||||
.collect(Collectors.toList());
|
||||
String adminToken = getAuthToken(admin.getEmail(), password);
|
||||
List<ProcessStatus> acceptableProcessStatuses = new LinkedList<>();
|
||||
acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED,
|
||||
ProcessStatus.RUNNING,
|
||||
ProcessStatus.COMPLETED));
|
||||
|
||||
AtomicReference<Integer> idSiteRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idComRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idComColRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idComItemRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idColRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idColItemRef = new AtomicReference<>();
|
||||
AtomicReference<Integer> idItemRef = new AtomicReference<>();
|
||||
|
||||
ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate");
|
||||
// we should be able to start the curate script with all our admins on the respective dso
|
||||
try {
|
||||
// start a process as general admin
|
||||
getClient(adminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCurateSite)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(admin.getID()),
|
||||
siteParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idSiteRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
|
||||
// check with the com admin
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCom)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(comAdmin.getID()),
|
||||
comParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idComRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
// the com admin should be able to run the curate also over the children collection and item
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCol)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(comAdmin.getID()),
|
||||
colParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idComColRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listItem)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(comAdmin.getID()),
|
||||
itemParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idComItemRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
// the com admin should NOT be able to run the curate over other communities, collections or items
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCurateSite)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherCom)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherCol)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(comAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherItem)))
|
||||
.andExpect(status().isForbidden());
|
||||
|
||||
// check with the col admin
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCol)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(colAdmin.getID()),
|
||||
colParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idColRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
// the col admin should be able to run the curate also over the owned item
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listItem)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(colAdmin.getID()),
|
||||
itemParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idColItemRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
|
||||
// the col admin should NOT be able to run the curate over the community, nor another collection, nor
|
||||
// on an item it does not own
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCurateSite)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCom)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherCol)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(colAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherItem)))
|
||||
.andExpect(status().isForbidden());
|
||||
|
||||
// check with the item admin
|
||||
getClient(itemAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listItem)))
|
||||
.andExpect(status().isAccepted())
|
||||
.andExpect(jsonPath("$", is(
|
||||
ProcessMatcher.matchProcess("curate",
|
||||
String.valueOf(itemAdmin.getID()),
|
||||
itemParameters,
|
||||
acceptableProcessStatuses))))
|
||||
.andDo(result -> idItemRef
|
||||
.set(read(result.getResponse().getContentAsString(), "$.processId")));
|
||||
// the item admin should NOT be able to run the curate over the community, nor the collection, nor
|
||||
// on an item it does not own
|
||||
getClient(itemAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCurateSite)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(itemAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCom)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(itemAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listCol)))
|
||||
.andExpect(status().isForbidden());
|
||||
getClient(itemAdminToken)
|
||||
.perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes")
|
||||
.param("properties", new ObjectMapper().writeValueAsString(listAnotherItem)))
|
||||
.andExpect(status().isForbidden());
|
||||
|
||||
} finally {
|
||||
ProcessBuilder.deleteProcess(idSiteRef.get());
|
||||
ProcessBuilder.deleteProcess(idComRef.get());
|
||||
ProcessBuilder.deleteProcess(idComColRef.get());
|
||||
ProcessBuilder.deleteProcess(idComItemRef.get());
|
||||
ProcessBuilder.deleteProcess(idColRef.get());
|
||||
ProcessBuilder.deleteProcess(idColItemRef.get());
|
||||
ProcessBuilder.deleteProcess(idItemRef.get());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -8,21 +8,13 @@
|
||||
package org.dspace.scripts;
|
||||
|
||||
import java.io.InputStream;
|
||||
import java.sql.SQLException;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.dspace.authorize.service.AuthorizeService;
|
||||
import org.dspace.core.Context;
|
||||
import org.dspace.scripts.configuration.ScriptConfiguration;
|
||||
import org.dspace.scripts.impl.MockDSpaceRunnableScript;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableScript> extends ScriptConfiguration<T> {
|
||||
|
||||
|
||||
@Autowired
|
||||
private AuthorizeService authorizeService;
|
||||
|
||||
private Class<T> dspaceRunnableClass;
|
||||
|
||||
@Override
|
||||
@@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration<T extends MockDSpaceRunnableS
|
||||
this.dspaceRunnableClass = dspaceRunnableClass;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAllowedToExecute(Context context) {
|
||||
try {
|
||||
return authorizeService.isAdmin(context);
|
||||
} catch (SQLException e) {
|
||||
throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Options getOptions() {
|
||||
if (options == null) {
|
||||
|
@@ -75,6 +75,9 @@
|
||||
<xsl:apply-templates
|
||||
select="doc:metadata/doc:element[@name='dc']/doc:element[@name='identifier']/doc:element[@name='uri']"
|
||||
mode="datacite"/>
|
||||
<!-- ACCESS RIGHTS from access status mechanism -->
|
||||
<xsl:apply-templates
|
||||
select="doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']" mode="datacite" />
|
||||
<!-- datacite:rights -->
|
||||
<xsl:apply-templates
|
||||
select="doc:metadata/doc:element[@name='dc']/doc:element[@name='rights']" mode="datacite"/>
|
||||
@@ -658,6 +661,40 @@
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
|
||||
<!-- from Access Status mechanism -->
|
||||
<!-- datacite:rights -->
|
||||
<!-- https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_accessrights.html -->
|
||||
<xsl:template match="doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']" mode="datacite">
|
||||
<xsl:variable name="rightsValue">
|
||||
<xsl:call-template name="resolveRightsName">
|
||||
<xsl:with-param name="field" select="text()"/>
|
||||
</xsl:call-template>
|
||||
</xsl:variable>
|
||||
<xsl:variable name="rightsURI">
|
||||
<xsl:call-template name="resolveRightsURI">
|
||||
<xsl:with-param name="field" select="text()"/>
|
||||
</xsl:call-template>
|
||||
</xsl:variable>
|
||||
<xsl:variable name="lc_rightsValue">
|
||||
<xsl:call-template name="lowercase">
|
||||
<xsl:with-param name="value" select="$rightsValue"/>
|
||||
</xsl:call-template>
|
||||
</xsl:variable>
|
||||
<!-- We are checking to ensure that only values ending in "access" can be used as datacite:rights.
|
||||
This is a valid solution as we pre-normalize dc.rights values in openaire4.xsl to end in the term
|
||||
"access" according to COAR Controlled Vocabulary -->
|
||||
<xsl:if test="ends-with($lc_rightsValue,'access')">
|
||||
<datacite:rights>
|
||||
<xsl:if test="$rightsURI">
|
||||
<xsl:attribute name="rightsURI">
|
||||
<xsl:value-of select="$rightsURI"/>
|
||||
</xsl:attribute>
|
||||
</xsl:if>
|
||||
<xsl:value-of select="$rightsValue"/>
|
||||
</datacite:rights>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
|
||||
|
||||
<!-- datacite:subjects -->
|
||||
<!-- https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_subject.html -->
|
||||
@@ -1125,11 +1162,11 @@
|
||||
<!-- Auxiliary templates - get global values -->
|
||||
<!-- -->
|
||||
|
||||
<!-- get the coar access rights globally -->
|
||||
<!-- get the coar access rights globally from access status mechanism -->
|
||||
<xsl:template name="getRightsURI">
|
||||
<xsl:call-template name="resolveRightsURI">
|
||||
<xsl:with-param name="field"
|
||||
select="//doc:element[@name='dc']/doc:element[@name='rights']/doc:element/doc:field[@name='value'and ends-with(translate(text(), $uppercase, $smallcase),'access')]/text()"/>
|
||||
select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/>
|
||||
</xsl:call-template>
|
||||
</xsl:template>
|
||||
|
||||
@@ -1207,7 +1244,7 @@
|
||||
</xsl:element>
|
||||
</xsl:if>
|
||||
</xsl:template>
|
||||
|
||||
|
||||
<!--
|
||||
This template will recursively create the field name based on parent node names
|
||||
to be something like this:
|
||||
@@ -1602,6 +1639,37 @@
|
||||
</xsl:choose>
|
||||
</xsl:template>
|
||||
|
||||
<!--
|
||||
This template will return the COAR Access Right Vocabulary Names in English
|
||||
like "open access"
|
||||
based on the values from DSpace Access Status mechanism like String 'open.access'
|
||||
please check class org.dspace.access.status.DefaultAccessStatusHelper for more information
|
||||
https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_accessrights.html#definition-and-usage-instruction
|
||||
-->
|
||||
<xsl:template name="resolveRightsName">
|
||||
<xsl:param name="field"/>
|
||||
<xsl:variable name="lc_value">
|
||||
<xsl:call-template name="lowercase">
|
||||
<xsl:with-param name="value" select="$field"/>
|
||||
</xsl:call-template>
|
||||
</xsl:variable>
|
||||
<xsl:choose>
|
||||
<xsl:when test="$lc_value = 'open.access'">
|
||||
<xsl:text>open access</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'embargo'">
|
||||
<xsl:text>embargoed access</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'restricted'">
|
||||
<xsl:text>restricted access</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'metadata.only'">
|
||||
<xsl:text>metadata only access</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:otherwise/>
|
||||
</xsl:choose>
|
||||
</xsl:template>
|
||||
|
||||
<!--
|
||||
This template will return the COAR Access Right Vocabulary URI
|
||||
like http://purl.org/coar/access_right/c_abf2
|
||||
@@ -1616,16 +1684,16 @@
|
||||
</xsl:call-template>
|
||||
</xsl:variable>
|
||||
<xsl:choose>
|
||||
<xsl:when test="$lc_value = 'open access'">
|
||||
<xsl:when test="$lc_value = 'open access' or $lc_value = 'open.access'">
|
||||
<xsl:text>http://purl.org/coar/access_right/c_abf2</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'embargoed access'">
|
||||
<xsl:when test="$lc_value = 'embargoed access' or $lc_value = 'embargo'">
|
||||
<xsl:text>http://purl.org/coar/access_right/c_f1cf</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'restricted access'">
|
||||
<xsl:when test="$lc_value = 'restricted access' or $lc_value = 'restricted'">
|
||||
<xsl:text>http://purl.org/coar/access_right/c_16ec</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:when test="$lc_value = 'metadata only access'">
|
||||
<xsl:when test="$lc_value = 'metadata only access' or $lc_value = 'metadata.only'">
|
||||
<xsl:text>http://purl.org/coar/access_right/c_14cb</xsl:text>
|
||||
</xsl:when>
|
||||
<xsl:otherwise/>
|
||||
|
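Taken together with resolveRightsName above, these templates map the raw access status values emitted by AccessStatusElementItemCompilePlugin onto the COAR access-right vocabulary. A compact restatement of that mapping, with the literals copied from the two templates (the Java class itself is just illustration):

    import java.util.Map;

    public class AccessStatusRightsMapping {
        public static void main(String[] args) {
            Map<String, String[]> mapping = Map.of(
                "open.access",   new String[] {"open access",          "http://purl.org/coar/access_right/c_abf2"},
                "embargo",       new String[] {"embargoed access",     "http://purl.org/coar/access_right/c_f1cf"},
                "restricted",    new String[] {"restricted access",    "http://purl.org/coar/access_right/c_16ec"},
                "metadata.only", new String[] {"metadata only access", "http://purl.org/coar/access_right/c_14cb"});

            mapping.forEach((status, rights) -> System.out.printf(
                "%s -> <datacite:rights rightsURI=\"%s\">%s</datacite:rights>%n",
                status, rights[1], rights[0]));
        }
    }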
@@ -11,7 +11,8 @@
|
||||
|
||||
<!ELEMENT name-map EMPTY >
|
||||
<!ATTLIST name-map
|
||||
collection-handle CDATA #REQUIRED
|
||||
collection-handle CDATA #IMPLIED
|
||||
collection-entity-type CDATA #IMPLIED
|
||||
submission-name NMTOKEN #REQUIRED>
|
||||
|
||||
<!-- 'step-definitions' must contain at least one 'step-definition' node -->
|
||||
|
@@ -47,6 +47,26 @@
|
||||
<name-map collection-handle="123456789/29" submission-name="JournalVolume"/>
|
||||
<name-map collection-handle="123456789/30" submission-name="JournalIssue"/>
|
||||
-->
|
||||
<!-- These configurations enable default submission forms per Entity type
|
||||
|
||||
The collection-entity-type will be the entity-type attribute associated with a collection,
|
||||
typically the entity name associated with a collection, if any has been created or loaded
|
||||
(that is usually specified in relationship-types.xml).
|
||||
- - - - - -
|
||||
PLEASE NOTICE THAT YOU WILL HAVE TO RESTART DSPACE
|
||||
- - - - - -
|
||||
Uncomment if you intend to use them
|
||||
-->
|
||||
<!--
|
||||
<name-map collection-entity-type="Publication" submission-name="Publication"/>
|
||||
<name-map collection-entity-type="Person" submission-name="Person"/>
|
||||
<name-map collection-entity-type="Project" submission-name="Project"/>
|
||||
<name-map collection-entity-type="OrgUnit" submission-name="OrgUnit"/>
|
||||
<name-map collection-entity-type="Journal" submission-name="Journal"/>
|
||||
<name-map collection-entity-type="JournalVolume" submission-name="JournalVolume"/>
|
||||
<name-map collection-entity-type="JournalIssue" submission-name="JournalIssue"/>
|
||||
-->
|
||||
|
||||
</submission-map>
|
||||
|
||||
|
||||
|
@@ -21,4 +21,8 @@
|
||||
<!-- Additional item.compile plugin to enrich field with information about
|
||||
Creative Commons License metadata -->
|
||||
<bean class="org.dspace.xoai.app.CCElementItemCompilePlugin"/>
|
||||
|
||||
<!-- Additional item.compile plugin to enrich field with information about
|
||||
Access Status metadata -->
|
||||
<bean class="org.dspace.xoai.app.plugins.AccessStatusElementItemCompilePlugin"/>
|
||||
</beans>
|
||||
|