Merge branch 'main' into w2p-102057_refferer-fix-main

This commit is contained in:
Kristof De Langhe
2023-06-08 10:08:09 +02:00
94 changed files with 5092 additions and 456 deletions

View File

@@ -23,6 +23,7 @@ import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.tika.Tika;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.authorize.AuthorizeException;
@@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
protected boolean zip = false;
protected boolean remoteUrl = false;
protected String zipfilename = null;
protected boolean zipvalid = false;
protected boolean help = false;
protected File workDir = null;
protected File workFile = null;
@@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
handler.logInfo("***End of Test Run***");
}
} finally {
// clean work dir
if (zip) {
FileUtils.deleteDirectory(new File(sourcedir));
FileUtils.deleteDirectory(workDir);
if (remoteUrl && workFile != null && workFile.exists()) {
// if zip file was valid then clean sourcedir
if (zipvalid && sourcedir != null && new File(sourcedir).exists()) {
FileUtils.deleteDirectory(new File(sourcedir));
}
// clean workdir
if (workDir != null && workDir.exists()) {
FileUtils.deleteDirectory(workDir);
}
// conditionally clean workFile if import was done in the UI or via a URL and it still exists
if (workFile != null && workFile.exists()) {
workFile.delete();
}
}
@@ -329,7 +339,14 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
// manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
}
if (optionalFileStream.isPresent()) {
// validate zip file
Optional<InputStream> validationFileStream = handler.getFileStream(context, zipfilename);
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
@@ -337,10 +354,32 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}
/**
* Confirm that the zip file has the correct MIME type
* @param inputStream the zip file's input stream to check
*/
protected void validateZip(InputStream inputStream) {
Tika tika = new Tika();
try {
String mimeType = tika.detect(inputStream);
if (mimeType.equals("application/zip")) {
zipvalid = true;
} else {
handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType);
throw new UnsupportedOperationException("A valid zip file must be supplied");
}
} catch (IOException e) {
throw new IllegalArgumentException(
"There was an error while reading the zip file: " + zipfilename);
}
}
/**
* Read the mapfile
* @param context
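A side note on the check above: Tika's detect(InputStream) inspects the stream's leading bytes (the magic number) rather than trusting a file extension, so a renamed text file will not pass as a zip. A minimal standalone sketch of the same check — the class, method, and file names here are hypothetical, not part of the patch:

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.tika.Tika;

public class ZipMimeCheck {
    public static boolean isZip(Path file) throws IOException {
        try (InputStream in = Files.newInputStream(file)) {
            // Tika detects from content, so the file extension is irrelevant
            return "application/zip".equals(new Tika().detect(in));
        }
    }
}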

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemimport;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;
import java.sql.SQLException;
@@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport {
// If this is a zip archive, unzip it first
if (zip) {
if (!remoteUrl) {
// confirm zip file exists
File myZipFile = new File(sourcedir + File.separator + zipfilename);
if ((!myZipFile.exists()) || (!myZipFile.isFile())) {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
// validate zip file
InputStream validationFileStream = new FileInputStream(myZipFile);
validateZip(validationFileStream);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(
@@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport {
// manage zip via remote url
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (optionalFileStream.isPresent()) {
// validate zip file via url
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
}
}
}

View File

@@ -22,7 +22,9 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
File f2 = null;
File f3 = null;
try {
f2 = getImageFile(f, 0, verbose);
// Step 1: get an image from our PDF file, with PDF-specific processing options
f2 = getImageFile(f, verbose);
// Step 2: use the image above to create the final resized and rotated thumbnail
f3 = getThumbnailFile(f2, verbose);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);

View File

@@ -116,9 +116,17 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
/**
* Return an image from a bitstream with specific processing options for
* PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to
* generate an intermediate image file for use with getThumbnailFile.
*/
public File getImageFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
// Writing an intermediate file to disk is inefficient, but since we're
// doing it anyway, we should use a lossless format. IM's internal MIFF
// is lossless like PNG and TIFF, but much faster.
File f2 = new File(f.getParentFile(), f.getName() + ".miff");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
@@ -155,7 +163,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
op.define("pdf:use-cropbox=true");
}
String s = "[" + page + "]";
String s = "[0]";
op.addImage(f.getAbsolutePath() + s);
if (configurationService.getBooleanProperty(PRE + ".flatten", true)) {
op.flatten();
@@ -208,20 +216,20 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.format("%s %s matches pattern and is replacable.%n",
description, nsrc);
System.out.format("%s %s matches pattern and is replaceable.%n",
description, n);
}
continue;
}
if (description.equals(getDescription())) {
if (verbose) {
System.out.format("%s %s is replaceable.%n",
getDescription(), nsrc);
getDescription(), n);
}
continue;
}
}
System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n",
nsrc, item.getHandle());
return false;
}
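The PDF branch above drives ImageMagick through im4java in two steps: render only the first page to a lossless MIFF intermediate, then derive the final thumbnail from it. A hedged sketch of that pipeline — the file names, density, and thumbnail size are illustrative values, not taken from the patch:

import org.im4java.core.ConvertCmd;
import org.im4java.core.IMOperation;

public class PdfThumbnailSketch {
    public static void main(String[] args) throws Exception {
        // Step 1: render only the first PDF page ("[0]") to a lossless intermediate
        IMOperation toImage = new IMOperation();
        toImage.density(144);                  // render resolution (illustrative)
        toImage.addImage("input.pdf[0]");
        toImage.flatten();                     // merge transparency onto a background
        toImage.addImage("intermediate.miff");
        new ConvertCmd().run(toImage);

        // Step 2: resize the intermediate into the final thumbnail
        IMOperation toThumb = new IMOperation();
        toThumb.addImage("intermediate.miff");
        toThumb.thumbnail(160, 160);           // bounding box, aspect ratio preserved
        toThumb.addImage("thumbnail.jpg");
        new ConvertCmd().run(toThumb);
    }
}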

View File

@@ -11,6 +11,9 @@ import java.io.Serializable;
import java.util.Map;
import org.apache.commons.lang3.BooleanUtils;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.WorkspaceItem;
import org.hibernate.proxy.HibernateProxyHelper;
/**
* Class representing configuration for a single step within an Item Submission
@@ -173,6 +176,38 @@ public class SubmissionStepConfig implements Serializable {
return visibilityOutside;
}
/**
* Check if the given submission section is hidden for the current submission scope
*
* @param obj the InProgressSubmission to check
* @return true if the submission section is hidden, false otherwise
*/
public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) {
String scopeToCheck = getScope(obj);
if (scope == null || scopeToCheck == null) {
return false;
}
String visibility = getVisibility();
String visibilityOutside = getVisibilityOutside();
if (scope.equalsIgnoreCase(scopeToCheck)) {
return "hidden".equalsIgnoreCase(visibility);
} else {
return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside);
}
}
private String getScope(InProgressSubmission obj) {
if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) {
return "submission";
}
return "workflow";
}
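To make the branching above concrete, a hypothetical walk-through for a step configured with scope="submission":

// Step config: scope="submission", visibility="hidden", visibilityOutside=null
//   WorkspaceItem  -> scopes match  -> hidden, because visibility is "hidden"
//   WorkflowItem   -> scopes differ -> hidden, because visibilityOutside is null
// With visibilityOutside="visible" instead, the WorkflowItem case would be shown.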
/**
* Get the number of this step in the current Submission process config.
* Step numbers start with #0 (although step #0 is ALWAYS the special

View File

@@ -22,11 +22,13 @@ import org.dspace.sort.SortOption;
* This class holds all the information about a specifically configured
* BrowseIndex. It is responsible for parsing the configuration, understanding
* what sort options are available, and what the names of the database
* tables that hold all the information are actually called.
* tables that hold all the information are actually called. Hierarchical browse
* indexes also contain information about the vocabulary they're using, see:
* {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex}
*
* @author Richard Jones
*/
public final class BrowseIndex {
public class BrowseIndex {
/** the configuration number, as specified in the config */
/**
* used for single metadata browse tables for generating the table name
@@ -102,7 +104,7 @@ public final class BrowseIndex {
*
* @param baseName The base of the table name
*/
private BrowseIndex(String baseName) {
protected BrowseIndex(String baseName) {
try {
number = -1;
tableBaseName = baseName;

View File

@@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO {
}
private void addDefaultFilterQueries(DiscoverQuery query) {
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container);
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container);
discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
}

View File

@@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event;
import org.dspace.harvest.HarvestedItem;
import org.dspace.harvest.service.HarvestedItemService;
import org.dspace.identifier.DOI;
import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.DOIService;
import org.dspace.identifier.service.IdentifierService;
import org.dspace.orcid.OrcidHistory;
import org.dspace.orcid.OrcidQueue;
@@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true)
protected IdentifierService identifierService;
@Autowired(required = true)
protected DOIService doiService;
@Autowired(required = true)
protected VersioningService versioningService;
@Autowired(required = true)
protected HarvestedItemService harvestedItemService;
@@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// Remove any Handle
handleService.unbindHandle(context, item);
// Delete a DOI if linked to the item.
// If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid,
// Hibernate will throw a foreign key constraint exception.
// Here we use the DOI service directly as it is able to manage DOIs even without any configured
// consumer or provider.
DOI doi = doiService.findDOIByDSpaceObject(context, item);
if (doi != null) {
doi.setDSpaceObject(null);
}
// remove version attached to the item
removeVersion(context, item);

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
@@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.dspace.core.Utils;
import org.dspace.core.service.PluginService;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
protected Map<String, Map<String, List<String>>> authoritiesFormDefinitions =
new HashMap<String, Map<String, List<String>>>();
// Map of vocabulary authorities to their index info equivalents
protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>();
// the item submission reader
private SubmissionConfigReader itemSubmissionConfigReader;
@@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
protected ConfigurationService configurationService;
@Autowired(required = true)
protected PluginService pluginService;
@Autowired
private DiscoveryConfigurationService searchConfigurationService;
final static String CHOICES_PLUGIN_PREFIX = "choices.plugin.";
final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation.";
@@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
return ma.getParentChoice(authorityName, vocabularyId, locale);
}
@Override
public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) {
if (this.vocabularyIndexMap.containsKey(nameVocab)) {
return this.vocabularyIndexMap.get(nameVocab);
} else {
init();
ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab);
if (source != null && source instanceof DSpaceControlledVocabulary) {
Set<String> metadataFields = new HashSet<>();
Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab);
for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) {
metadataFields.addAll(formToField.getValue().stream().map(value ->
StringUtils.replace(value, "_", "."))
.collect(Collectors.toList()));
}
DiscoverySearchFilterFacet matchingFacet = null;
for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) {
boolean coversAllFieldsFromVocab = true;
for (String fieldFromVocab: metadataFields) {
boolean coversFieldFromVocab = false;
for (String facetMdField: facetConfig.getMetadataFields()) {
if (facetMdField.startsWith(fieldFromVocab)) {
coversFieldFromVocab = true;
break;
}
}
if (!coversFieldFromVocab) {
coversAllFieldsFromVocab = false;
break;
}
}
if (coversAllFieldsFromVocab) {
matchingFacet = facetConfig;
break;
}
}
DSpaceControlledVocabularyIndex vocabularyIndex =
new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields,
matchingFacet);
this.vocabularyIndexMap.put(nameVocab, vocabularyIndex);
return vocabularyIndex;
}
return null;
}
}
}
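The nested loops above implement a coverage test: a facet configuration matches when every metadata field of the vocabulary is a prefix of at least one of the facet's metadata fields. A stream-based restatement of that predicate — a sketch for clarity, not the patch's code:

import java.util.List;

public class FacetCoverage {
    // A facet matches when every vocabulary field is covered by (is a prefix of)
    // at least one of the facet's metadata fields.
    static boolean coversAllFields(List<String> vocabFields, List<String> facetFields) {
        return vocabFields.stream().allMatch(vocabField ->
                facetFields.stream().anyMatch(facetField -> facetField.startsWith(vocabField)));
    }
}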

View File

@@ -0,0 +1,47 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.Set;
import org.dspace.browse.BrowseIndex;
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
/**
* Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a
* {@code BrowseIndexRest}
* cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)}
*
* @author Marie Verdonck (Atmire) on 04/05/2023
*/
public class DSpaceControlledVocabularyIndex extends BrowseIndex {
protected DSpaceControlledVocabulary vocabulary;
protected Set<String> metadataFields;
protected DiscoverySearchFilterFacet facetConfig;
public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set<String> metadataFields,
DiscoverySearchFilterFacet facetConfig) {
super(controlledVocabulary.vocabularyName);
this.vocabulary = controlledVocabulary;
this.metadataFields = metadataFields;
this.facetConfig = facetConfig;
}
public DSpaceControlledVocabulary getVocabulary() {
return vocabulary;
}
public Set<String> getMetadataFields() {
return this.metadataFields;
}
public DiscoverySearchFilterFacet getFacetConfig() {
return this.facetConfig;
}
}

View File

@@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choice;
import org.dspace.content.authority.ChoiceAuthority;
import org.dspace.content.authority.Choices;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
/**
* Broker for ChoiceAuthority plugins, and for other information configured
@@ -220,4 +221,7 @@ public interface ChoiceAuthorityService {
* @return the parent Choice object if any
*/
public Choice getParentChoice(String authorityName, String vocabularyId, String locale);
public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab);
}

View File

@@ -18,6 +18,9 @@ import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
@@ -73,35 +76,80 @@ public class SearchUtils {
searchService = null;
}
/**
* Retrieves the Discovery Configuration for a null context, prefix and DSpace object.
* This will result in returning the default configuration
* @return the default configuration
*/
public static DiscoveryConfiguration getDiscoveryConfiguration() {
return getDiscoveryConfiguration(null, null);
return getDiscoveryConfiguration(null, null, null);
}
public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) {
return getDiscoveryConfiguration(null, dso);
/**
* Retrieves the Discovery Configuration with a null prefix for a DSpace object.
* @param context
* the database context
* @param dso
* the DSpace object
* @return the Discovery Configuration for the specified DSpace object
*/
public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) {
return getDiscoveryConfiguration(context, null, dso);
}
/**
* Return the discovery configuration to use in a specific scope for the kind of search identified by the prefix. A
* null prefix means the normal query; other predefined values are workspace or workflow
*
*
*
* @param context
* the database context
* @param prefix
* the namespace of the configuration to lookup if any
* @param dso
* the DSpaceObject
* @return the discovery configuration for the specified scope
*/
public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) {
public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix,
DSpaceObject dso) {
if (prefix != null) {
return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix);
} else {
return getDiscoveryConfigurationByName(dso != null ? dso.getHandle() : null);
return getDiscoveryConfigurationByDSO(context, dso);
}
}
/**
* Retrieve the configuration for the current dspace object and all its parents and add it to the provided set
* @param context - The database context
* @param configurations - The set of configurations to add the retrieved configurations to
* @param prefix - The namespace of the configuration to lookup if any
* @param dso - The DSpace Object
* @return the set of configurations with additional retrieved ones for the DSpace object and its parents
* @throws SQLException
*/
public static Set<DiscoveryConfiguration> addDiscoveryConfigurationForParents(
Context context, Set<DiscoveryConfiguration> configurations, String prefix, DSpaceObject dso)
throws SQLException {
if (dso == null) {
configurations.add(getDiscoveryConfigurationByName(null));
return configurations;
}
if (prefix != null) {
configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle()));
} else {
configurations.add(getDiscoveryConfigurationByName(dso.getHandle()));
}
DSpaceObjectService<DSpaceObject> dSpaceObjectService = ContentServiceFactory.getInstance()
.getDSpaceObjectService(dso);
DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso);
return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject);
}
/**
* Return the discovery configuration identified by the specified name
*
*
* @param configurationName the configuration name assigned to the bean in the
* discovery.xml
* @return the discovery configuration
@@ -113,6 +161,18 @@ public class SearchUtils {
return configurationService.getDiscoveryConfiguration(configurationName);
}
/**
* Return the discovery configuration for the provided DSO
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO
*/
public static DiscoveryConfiguration getDiscoveryConfigurationByDSO(
Context context, DSpaceObject dso) {
DiscoveryConfigurationService configurationService = getConfigurationService();
return configurationService.getDiscoveryDSOConfiguration(context, dso);
}
public static DiscoveryConfigurationService getConfigurationService() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager
@@ -127,47 +187,55 @@ public class SearchUtils {
* Method that retrieves a list of all the configuration objects from the given item
* A configuration object can be returned for each parent community/collection
*
* @param context the database context
* @param item the DSpace item
* @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Item item) throws SQLException {
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Context context, Item item)
throws SQLException {
List<Collection> collections = item.getCollections();
return getAllDiscoveryConfigurations(null, collections, item);
return getAllDiscoveryConfigurations(context, null, collections, item);
}
/**
* Return all the discovery configurations applicable to the provided workspace item
*
* @param context
* @param witem a workspace item
* @return a list of discovery configuration
* @throws SQLException
*/
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException {
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkspaceItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workspace", collections, witem.getItem());
return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem());
}
/**
* Return all the discovery configurations applicable to the provided workflow item
*
* @param context
* @param witem a workflow item
* @return a list of discovery configuration
* @throws SQLException
*/
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException {
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkflowItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workflow", collections, witem.getItem());
return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem());
}
private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(String prefix,
private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
String prefix,
List<Collection> collections, Item item)
throws SQLException {
Set<DiscoveryConfiguration> result = new HashSet<>();
for (Collection collection : collections) {
DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection);
result.add(configuration);
addDiscoveryConfigurationForParents(context, result, prefix, collection);
}
//Add alwaysIndex configurations

View File

@@ -53,10 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
if (bitstreams != null) {
for (Bitstream bitstream : bitstreams) {
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
// Add _keyword and _filter fields which are necessary to support filtering and faceting
// for the file names
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
String description = bitstream.getDescription();
if ((description != null) && !description.isEmpty()) {
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
// Add _keyword and _filter fields which are necessary to support filtering and
// faceting for the descriptions
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
description);
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
description);
}
}
}
@@ -65,4 +75,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
}
}
}
}
}
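For reference, the _keyword and _filter suffixes follow Discovery's Solr schema conventions for facetable/filterable copies of a field. A sketch of the indexing step, assuming the constant resolves to a field name such as "original_bundle_filenames" — an assumption, since the hunk does not show the constant's value:

import org.apache.solr.common.SolrInputDocument;

public class FileInfoFieldsSketch {
    static void addFilenameFields(SolrInputDocument document, String filename) {
        // base field plus the variants Discovery needs for filtering and faceting
        document.addField("original_bundle_filenames", filename);          // hypothetical field name
        document.addField("original_bundle_filenames_keyword", filename);
        document.addField("original_bundle_filenames_filter", filename);
    }
}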

View File

@@ -7,12 +7,23 @@
*/
package org.dspace.discovery.configuration;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableDSpaceObject;
import org.dspace.services.factory.DSpaceServicesFactory;
@@ -22,9 +33,18 @@ import org.dspace.services.factory.DSpaceServicesFactory;
*/
public class DiscoveryConfigurationService {
private static final Logger log = LogManager.getLogger();
private Map<String, DiscoveryConfiguration> map;
private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>();
/**
* Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its
* own configuration, we use that of the first parent that does.
* This cache ensures we do not have to go up the hierarchy every time.
*/
private final Map<UUID, DiscoveryConfiguration> comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>();
public Map<String, DiscoveryConfiguration> getMap() {
return map;
}
@@ -41,25 +61,98 @@ public class DiscoveryConfigurationService {
this.toIgnoreMetadataFields = toIgnoreMetadataFields;
}
public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) {
/**
* Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from
* the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will
* be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default
* configuration will be retrieved
*
* When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param indexableObject - The IndexableObject to retrieve the configuration for
* @return the discovery configuration for the provided IndexableObject.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) {
String name;
if (dso == null) {
name = "default";
} else if (dso instanceof IndexableDSpaceObject) {
name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle();
if (indexableObject == null) {
return getDiscoveryConfiguration(null);
} else if (indexableObject instanceof IndexableDSpaceObject) {
return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject());
} else {
name = dso.getUniqueIndexID();
name = indexableObject.getUniqueIndexID();
}
return getDiscoveryConfiguration(name);
}
public DiscoveryConfiguration getDiscoveryConfiguration(final String name) {
/**
* Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO.
*/
public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) {
// Fall back to default configuration
if (dso == null) {
return getDiscoveryConfiguration(null, true);
}
// Attempt to retrieve cached configuration by UUID
if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) {
return comColToDiscoveryConfigurationMap.get(dso.getID());
}
DiscoveryConfiguration configuration;
// Attempt to retrieve configuration by DSO handle
configuration = getDiscoveryConfiguration(dso.getHandle(), false);
if (configuration == null) {
// Recurse up the Comm/Coll hierarchy until a configuration is found
DSpaceObjectService<DSpaceObject> dSpaceObjectService =
ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
DSpaceObject parentObject = null;
try {
parentObject = dSpaceObjectService.getParentObject(context, dso);
} catch (SQLException e) {
log.error(e);
}
configuration = getDiscoveryDSOConfiguration(context, parentObject);
}
// Cache the resulting configuration when the DSO is a Community or Collection
if (dso instanceof Community || dso instanceof Collection) {
comColToDiscoveryConfigurationMap.put(dso.getID(), configuration);
}
return configuration;
}
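The method above combines a recursive walk up the Community/Collection hierarchy with a per-UUID memo, so repeated lookups skip the climb. A self-contained sketch of the same pattern with hypothetical types — note that, for brevity, it memoizes every level, whereas the patch caches only Communities and Collections:

import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

public class HierarchyConfigCache {
    record Node(UUID id, String name, Node parent) {}

    private final Map<String, String> configsByName = Map.of("123456789/1", "site-config");
    private final Map<UUID, String> cache = new ConcurrentHashMap<>();

    String resolve(Node node) {
        if (node == null) {
            return "default";                 // no parent left: fall back to the default
        }
        String cached = cache.get(node.id());
        if (cached != null) {
            return cached;                    // cache hit: no hierarchy walk needed
        }
        String own = configsByName.get(node.name());
        String result = (own != null) ? own : resolve(node.parent());
        cache.put(node.id(), result);         // memoize the answer for this node
        return result;
    }
}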
/**
* Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the
* default configuration will be returned.
* @param name - The name of the configuration to be retrieved
* @return the Discovery Configuration for the provided name, or default when none was found.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(String name) {
return getDiscoveryConfiguration(name, true);
}
/**
* Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration
* will be returned when no match is found. When useDefault is set to false, null will be returned when no match is
* found.
* @param name - The name of the configuration to retrieve
* @param useDefault - Whether the default configuration should be used when no match is found
* @return the configuration for the provided name
*/
public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) {
DiscoveryConfiguration result;
result = StringUtils.isBlank(name) ? null : getMap().get(name);
if (result == null) {
if (result == null && useDefault) {
//No specific configuration, get the default one
result = getMap().get("default");
}
@@ -67,12 +160,23 @@ public class DiscoveryConfigurationService {
return result;
}
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName,
final IndexableObject dso) {
/**
* Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be
* checked for the provided name. When no match is found for the name, the configuration will be retrieved for the
* IndexableObject
*
* @param context - The database context
* @param configurationName - The name of the configuration to be retrieved
* @param indexableObject - The indexable object to retrieve the configuration for
* @return the Discovery configuration for the provided name or, when the name is not found, for the provided IndexableObject
*/
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context,
String configurationName,
IndexableObject indexableObject) {
if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) {
return getMap().get(configurationName);
} else {
return getDiscoveryConfiguration(dso);
return getDiscoveryConfiguration(context, indexableObject);
}
}
@@ -92,13 +196,25 @@ public class DiscoveryConfigurationService {
return configs;
}
/**
* @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet}
*/
public List<DiscoverySearchFilterFacet> getAllFacetsConfig() {
List<DiscoverySearchFilterFacet> configs = new ArrayList<>();
for (String key : map.keySet()) {
DiscoveryConfiguration config = map.get(key);
configs.addAll(config.getSidebarFacets());
}
return configs;
}
public static void main(String[] args) {
System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size());
DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(
DiscoveryConfigurationService.class
.getName(),
DiscoveryConfigurationService.class);
DiscoveryConfigurationService.class
.getName(),
DiscoveryConfigurationService.class);
for (String key : mainService.getMap().keySet()) {
System.out.println(key);
@@ -126,7 +242,7 @@ public class DiscoveryConfigurationService {
System.out.println("Recent submissions configuration:");
DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration
.getRecentSubmissionConfiguration();
.getRecentSubmissionConfiguration();
System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField());
System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax());

View File

@@ -9,6 +9,7 @@ package org.dspace.discovery.configuration;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -22,6 +23,11 @@ public class DiscoverySortConfiguration {
private List<DiscoverySortFieldConfiguration> sortFields = new ArrayList<DiscoverySortFieldConfiguration>();
/**
* Default sort configuration to use when needed
*/
@Nullable private DiscoverySortFieldConfiguration defaultSortField;
public List<DiscoverySortFieldConfiguration> getSortFields() {
return sortFields;
}
@@ -30,6 +36,14 @@ public class DiscoverySortConfiguration {
this.sortFields = sortFields;
}
public DiscoverySortFieldConfiguration getDefaultSortField() {
return defaultSortField;
}
public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) {
this.defaultSortField = configuration;
}
public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) {
if (StringUtils.isBlank(sortField)) {
return null;

View File

@@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
final Collection collection = indexableCollection.getIndexedObject();
// Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection);
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>();
@@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
return locations;
}
}
}

View File

@@ -69,7 +69,7 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
final Community community = indexableObject.getIndexedObject();
// Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community);
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>();
@@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
return locations;
}
}
}

View File

@@ -80,11 +80,13 @@ public abstract class InprogressSubmissionIndexFactoryImpl
// Add item metadata
List<DiscoveryConfiguration> discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission);
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission);
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkspaceItem) inProgressSubmission);
} else {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item);
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
}
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
indexableCollectionService.storeCommunityCollectionLocations(doc, locations);

View File

@@ -160,7 +160,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
}
// Add the item metadata
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item);
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations);
//mandatory facet to show status on mydspace

View File

@@ -64,6 +64,7 @@ public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl<IndexableMet
Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS);
// add read permission on doc for anonymous group
doc.addField("read", "g" + anonymousGroup.getID());
doc.addField(FIELD_NAME_VARIATIONS + "_sort", fieldName);
return doc;
}

View File

@@ -332,7 +332,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
}
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
if (searchSortConfiguration.getDefaultSortField() != null) {
sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
}
@@ -342,7 +344,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) {
String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
if (searchSortConfiguration.getDefaultSortField() != null) {
sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) {

View File

@@ -27,13 +27,14 @@ import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
/**
* @author Marsa Haoua
* @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de)
*/
public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider {
public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean {
/**
* log4j category
*/
@@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider {
@Autowired(required = true)
protected VersionHistoryService versionHistoryService;
/**
* After all properties are set, check that versioning is enabled.
*
* @throws Exception if versioning is disabled
*/
@Override
public void afterPropertiesSet() throws Exception {
if (!configurationService.getBooleanProperty("versioning.enabled", true)) {
throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() +
" is enabled, but the versioning is disabled.");
}
}
@Override
public String mint(Context context, DSpaceObject dso) throws IdentifierException {
return mint(context, dso, this.filter);
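The afterPropertiesSet hook is Spring's InitializingBean callback, used here to fail fast at startup when a versioned provider is wired up while versioning itself is switched off. A minimal sketch of the pattern with a hypothetical flag in place of the configuration lookup:

import org.springframework.beans.factory.InitializingBean;

public class RequiresVersioning implements InitializingBean {
    private final boolean versioningEnabled;

    public RequiresVersioning(boolean versioningEnabled) {
        this.versioningEnabled = versioningEnabled;
    }

    @Override
    public void afterPropertiesSet() {
        // refuse to start when the bean's precondition is not met
        if (!versioningEnabled) {
            throw new IllegalStateException(
                    "RequiresVersioning is enabled, but versioning is disabled.");
        }
    }
}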

View File

@@ -35,6 +35,7 @@ import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -45,7 +46,7 @@ import org.springframework.stereotype.Component;
* @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de)
*/
@Component
public class VersionedHandleIdentifierProvider extends IdentifierProvider {
public class VersionedHandleIdentifierProvider extends IdentifierProvider implements InitializingBean {
/**
* log4j category
*/
@@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider {
@Autowired(required = true)
protected ContentServiceFactory contentServiceFactory;
/**
* After all properties are set, check that versioning is enabled.
*
* @throws Exception if versioning is disabled
*/
@Override
public void afterPropertiesSet() throws Exception {
if (!configurationService.getBooleanProperty("versioning.enabled", true)) {
throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() +
" is enabled, but the versioning is disabled.");
}
}
@Override
public boolean supports(Class<? extends Identifier> identifier) {
return Handle.class.isAssignableFrom(identifier);

View File

@@ -30,6 +30,7 @@ import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -39,7 +40,8 @@ import org.springframework.stereotype.Component;
* @author Ben Bosman (ben at atmire dot com)
*/
@Component
public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider {
public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider
implements InitializingBean {
/**
* log4j category
*/
@@ -65,6 +67,19 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident
@Autowired(required = true)
private ItemService itemService;
/**
* After all properties are set, check that versioning is enabled.
*
* @throws Exception if versioning is disabled
*/
@Override
public void afterPropertiesSet() throws Exception {
if (!configurationService.getBooleanProperty("versioning.enabled", true)) {
throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() +
" is enabled, but the versioning is disabled.");
}
}
@Override
public boolean supports(Class<? extends Identifier> identifier) {
return Handle.class.isAssignableFrom(identifier);

View File

@@ -15,8 +15,8 @@ import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.DCDate;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
@@ -107,28 +107,30 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
LinkedList<MetadatumDTO> dayList = (LinkedList<MetadatumDTO>) day.contributeMetadata(t);
for (int i = 0; i < yearList.size(); i++) {
DCDate dcDate = null;
String resultDateString = "";
String dateString = "";
SimpleDateFormat resultFormatter = null;
if (monthList.size() > i && dayList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() +
"-" + dayList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM-dd");
} else if (monthList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM");
} else {
dateString = yearList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy");
}
int j = 0;
// Use the first dcDate that has been formatted (Config should go from most specific to most lenient)
while (j < dateFormatsToAttempt.size()) {
while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) {
String dateFormat = dateFormatsToAttempt.get(j);
try {
SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
Date date = formatter.parse(dateString);
dcDate = new DCDate(date);
values.add(metadataFieldMapping.toDCValue(field, formatter.format(date)));
break;
resultDateString = resultFormatter.format(date);
} catch (ParseException e) {
// Multiple dateformats can be configured, we don't want to print the entire stacktrace every
// time one of those formats fails.
@@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
}
j++;
}
if (dcDate == null) {
if (StringUtils.isNotBlank(resultDateString)) {
values.add(metadataFieldMapping.toDCValue(field, resultDateString));
} else {
log.info(
"Failed parsing " + dateString + ", check " +
"the configured dataformats in config/spring/api/pubmed-integration.xml");

View File

@@ -11,6 +11,8 @@ import java.text.ParseException;
import java.util.Date;
import java.util.Objects;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.AuthorizeService;
@@ -21,6 +23,7 @@ import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.util.DateMathParser;
import org.dspace.util.TimeHelpers;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -28,9 +31,8 @@ import org.springframework.beans.factory.annotation.Autowired;
* set permission on a file. An option is defined by a name such as "open
* access", "embargo", "restricted access", etc. and some optional attributes to
* better clarify the constraints and input available to the user. For instance
* an embargo option could allow to set a start date not longer than 3 years,
* etc
*
* an embargo option could allow to set a start date not longer than 3 years.
*
* @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it)
*/
public class AccessConditionOption {
@@ -44,9 +46,9 @@ public class AccessConditionOption {
@Autowired
private ResourcePolicyService resourcePolicyService;
DateMathParser dateMathParser = new DateMathParser();
private static final Logger LOG = LogManager.getLogger();
/** An unique name identifying the access contion option **/
/** A unique name identifying the access condition option. **/
private String name;
/**
@@ -147,6 +149,9 @@ public class AccessConditionOption {
* startDate should be null. Otherwise startDate may not be null.
* @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false,
* endDate should be null. Otherwise endDate may not be null.
* @throws SQLException passed through.
* @throws AuthorizeException passed through.
* @throws ParseException passed through (indicates problem with a date).
*/
public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description,
Date startDate, Date endDate)
@@ -160,7 +165,7 @@ public class AccessConditionOption {
/**
* Validate ResourcePolicy and after update it
*
*
* @param context DSpace context
* @param resourcePolicy ResourcePolicy to update
* @throws SQLException If database error
@@ -175,17 +180,25 @@ public class AccessConditionOption {
}
/**
* Validate the policy properties, throws exceptions if any is not valid
*
* @param context DSpace context
* @param name Name of the resource policy
* @param startDate Start date of the resource policy. If {@link #getHasStartDate()}
* returns false, startDate should be null. Otherwise startDate may not be null.
* @param endDate End date of the resource policy. If {@link #getHasEndDate()}
* returns false, endDate should be null. Otherwise endDate may not be null.
* Validate the policy properties, throws exceptions if any is not valid.
*
* @param context DSpace context.
* @param name Name of the resource policy.
* @param startDate Start date of the resource policy. If
* {@link #getHasStartDate()} returns false, startDate
* should be null. Otherwise startDate may not be null.
* @param endDate End date of the resource policy. If
* {@link #getHasEndDate()} returns false, endDate should
* be null. Otherwise endDate may not be null.
* @throws IllegalStateException if a date is required and absent,
* a date is not required and present, or a date exceeds its
* configured maximum.
* @throws ParseException passed through.
*/
private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate)
throws SQLException, AuthorizeException, ParseException {
throws IllegalStateException, ParseException {
LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}",
name, startDate, endDate);
if (getHasStartDate() && Objects.isNull(startDate)) {
throw new IllegalStateException("The access condition " + getName() + " requires a start date.");
}
@@ -199,29 +212,33 @@ public class AccessConditionOption {
throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date.");
}
DateMathParser dateMathParser = new DateMathParser();
Date latestStartDate = null;
if (Objects.nonNull(getStartDateLimit())) {
latestStartDate = dateMathParser.parseMath(getStartDateLimit());
latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit()));
}
Date latestEndDate = null;
if (Objects.nonNull(getEndDateLimit())) {
latestEndDate = dateMathParser.parseMath(getEndDateLimit());
latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit()));
}
LOG.debug(" latestStartDate {}, latestEndDate {}",
latestStartDate, latestEndDate);
// throw if startDate after latestStartDate
if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) {
throw new IllegalStateException(String.format(
"The start date of access condition %s should be earlier than %s from now.",
getName(), getStartDateLimit()
"The start date of access condition %s should be earlier than %s from now (%s).",
getName(), getStartDateLimit(), dateMathParser.getNow()
));
}
// throw if endDate after latestEndDate
if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) {
throw new IllegalStateException(String.format(
"The end date of access condition %s should be earlier than %s from now.",
getName(), getEndDateLimit()
"The end date of access condition %s should be earlier than %s from now (%s).",
getName(), getEndDateLimit(), dateMathParser.getNow()
));
}
}
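Put together, the limit check now normalizes the computed limit to midnight UTC before comparing, which avoids spurious rejections caused by the time-of-day component. A sketch using the two classes from this changeset; the "+3YEARS" limit is an example value:

import java.text.ParseException;
import java.util.Date;
import org.dspace.util.DateMathParser;
import org.dspace.util.TimeHelpers;

public class StartDateLimitCheck {
    static void check(Date requestedStart) throws ParseException {
        DateMathParser parser = new DateMathParser();
        // normalize the limit to midnight UTC, as validateResourcePolicy now does
        Date latestStart = TimeHelpers.toMidnightUTC(parser.parseMath("+3YEARS"));
        if (requestedStart.after(latestStart)) {
            throw new IllegalStateException("start date exceeds the configured limit");
        }
    }
}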

View File

@@ -26,12 +26,15 @@ import java.util.Map;
import java.util.TimeZone;
import java.util.regex.Pattern;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency:
* https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr
* /util/DateMathParser.java
* This class (Apache license) is copied from Apache Solr, adding some tweaks to
* resolve an unneeded dependency. See
* <a href='https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr/util/DateMathParser.java'>the original</a>.
*
* <p>
* A Simple Utility class for parsing "math" like strings relating to Dates.
*
* <p>
@@ -78,7 +81,7 @@ import java.util.regex.Pattern;
* "<code>setNow</code>" in the interim). The default value of 'now' is
* the time at the moment the <code>DateMathParser</code> instance is
* constructed, unless overridden by the {@link CommonParams#NOW NOW}
* request param.
* request parameter.
* </p>
*
* <p>
@@ -88,7 +91,7 @@ import java.util.regex.Pattern;
* cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default
* <code>TimeZone</code> used is <code>UTC</code> unless overridden by the
* {@link CommonParams#TZ TZ}
* request param.
* request parameter.
* </p>
*
* <p>
@@ -102,6 +105,8 @@ import java.util.regex.Pattern;
*/
public class DateMathParser {
private static final Logger LOG = LogManager.getLogger();
public static final TimeZone UTC = TimeZone.getTimeZone("UTC");
/**
@@ -119,12 +124,12 @@ public class DateMathParser {
/**
* A mapping from (uppercased) String labels identifying time units,
* to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to
* to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to
* set/add/roll that unit of measurement.
*
* <p>
* A single logical unit of time might be represented by multiple labels
* for convenience (ie: <code>DATE==DAYS</code>,
* for convenience (i.e. <code>DATE==DAYS</code>,
* <code>MILLI==MILLIS</code>)
* </p>
*
@@ -220,6 +225,7 @@ public class DateMathParser {
*
* @param now an optional fixed date to use as "NOW"
* @param val the string to parse
* @return result of applying the parsed expression to "NOW".
* @throws Exception
*/
public static Date parseMath(Date now, String val) throws Exception {
@@ -308,6 +314,7 @@ public class DateMathParser {
/**
* Defines this instance's concept of "now".
*
* @param n new value of "now".
* @see #getNow
*/
public void setNow(Date n) {
@@ -316,12 +323,12 @@ public class DateMathParser {
/**
* Returns a clone of this instance's concept of "now" (never null).
*
* If setNow was never called (or if null was specified) then this method
* first defines 'now' as the value dictated by the SolrRequestInfo if it
* exists -- otherwise it uses a new Date instance at the moment getNow()
* is first called.
*
* @return "now".
* @see #setNow
* @see SolrRequestInfo#getNOW
*/
@@ -334,9 +341,12 @@ public class DateMathParser {
}
/**
* Parses a string of commands relative "now" are returns the resulting Date.
* Parses a date expression relative to "now".
*
* @throws ParseException positions in ParseExceptions are token positions, not character positions.
* @param math a date expression such as "+24MONTHS".
* @return the result of applying the expression to the current time.
* @throws ParseException positions in ParseExceptions are token positions,
* not character positions.
*/
public Date parseMath(String math) throws ParseException {
/* check for No-Op */
@@ -344,6 +354,8 @@ public class DateMathParser {
return getNow();
}
LOG.debug("parsing {}", math);
ZoneId zoneId = zone.toZoneId();
// localDateTime is a date and time local to the timezone specified
LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime();
@@ -394,11 +406,44 @@ public class DateMathParser {
}
}
LOG.debug("returning {}", localDateTime);
return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant());
}
private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)");
/**
* For manual testing. With one argument, test one-argument parseMath.
* With two (or more) arguments, test two-argument parseMath.
*
* @param argv date math expressions.
* @throws java.lang.Exception passed through.
*/
public static void main(String[] argv)
throws Exception {
DateMathParser parser = new DateMathParser();
try {
Date parsed;
if (argv.length <= 0) {
System.err.println("Date math expression(s) expected.");
}
if (argv.length > 0) {
parsed = parser.parseMath(argv[0]);
System.out.format("Applied %s to implicit current time: %s%n",
argv[0], parsed.toString());
}
if (argv.length > 1) {
parsed = DateMathParser.parseMath(new Date(), argv[1]);
System.out.format("Applied %s to explicit current time: %s%n",
argv[1], parsed.toString());
}
} catch (ParseException ex) {
System.err.format("Oops: %s%n", ex.getMessage());
}
}
}
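For reference, a minimal usage sketch of the parser above; the expressions follow the Solr date-math syntax documented in the class javadoc, and the printed values are illustrative.

import java.util.Date;
import org.dspace.util.DateMathParser;

public class DateMathDemo {
    public static void main(String[] args) throws Exception {
        DateMathParser parser = new DateMathParser();
        parser.setNow(new Date());                        // pin "now" for reproducible results
        Date inTwoYears = parser.parseMath("+24MONTHS");  // add 24 months to "now"
        Date startOfDay = parser.parseMath("/DAY");       // round "now" down to midnight (UTC by default)
        System.out.println(inTwoYears + " / " + startOfDay);
    }
}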

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
/**
* Various manipulations of dates and times.
*
* @author mwood
*/
public class TimeHelpers {
private static final TimeZone UTC = TimeZone.getTimeZone("UTC");
/**
* Never instantiate this class.
*/
private TimeHelpers() {}
/**
* Set a Date's time to midnight UTC.
*
* @param from some date-time.
* @return midnight UTC of the supplied date-time.
*/
public static Date toMidnightUTC(Date from) {
GregorianCalendar calendar = new GregorianCalendar(UTC);
calendar.setTime(from);
calendar.set(GregorianCalendar.HOUR_OF_DAY, 0);
calendar.set(GregorianCalendar.MINUTE, 0);
calendar.set(GregorianCalendar.SECOND, 0);
calendar.set(GregorianCalendar.MILLISECOND, 0);
return calendar.getTime();
}
}
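A short sketch of how this helper pairs with DateMathParser, as the submission converters later in this changeset do; the "+36MONTHS" limit expression is illustrative.

import java.util.Date;
import org.dspace.util.DateMathParser;
import org.dspace.util.TimeHelpers;

public class MidnightUtcDemo {
    public static void main(String[] args) throws Exception {
        Date raw = new DateMathParser().parseMath("+36MONTHS"); // illustrative limit expression
        Date clamped = TimeHelpers.toMidnightUTC(raw);          // drop the time-of-day component
        System.out.println(clamped);
    }
}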

View File

@@ -122,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper
org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided
org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks
org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long!
org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \
the repository

View File

@@ -23,6 +23,7 @@
<name-map collection-handle="123456789/qualdrop-test" submission-name="qualdroptest"/>
<name-map collection-handle="123456789/typebind-test" submission-name="typebindtest"/>
<name-map collection-handle="123456789/accessCondition-not-discoverable" submission-name="accessConditionNotDiscoverable"/>
<name-map collection-handle="123456789/test-hidden" submission-name="test-hidden"/>
</submission-map>
@@ -54,7 +55,6 @@
<heading></heading>
<processing-class>org.dspace.app.rest.submit.step.CollectionStep</processing-class>
<type>collection</type>
<scope visibility="hidden" visibilityOutside="hidden">submission</scope>
</step-definition>
<step-definition id="traditionalpageone" mandatory="true">
<heading>submit.progressbar.describe.stepone</heading>
@@ -149,6 +149,34 @@
<processing-class>org.dspace.app.rest.submit.step.ShowIdentifiersStep</processing-class>
<type>identifiers</type>
</step-definition>
<step-definition id="test-outside-workflow-hidden" mandatory="true">
<heading>submit.progressbar.describe.stepone</heading>
<processing-class>org.dspace.app.rest.submit.step.DescribeStep</processing-class>
<type>submission-form</type>
<scope visibilityOutside="hidden">workflow</scope>
</step-definition>
<step-definition id="test-outside-submission-hidden" mandatory="true">
<heading>submit.progressbar.describe.stepone</heading>
<processing-class>org.dspace.app.rest.submit.step.DescribeStep</processing-class>
<type>submission-form</type>
<scope visibilityOutside="hidden">submission</scope>
</step-definition>
<step-definition id="test-never-hidden" mandatory="true">
<heading></heading>
<processing-class>org.dspace.app.rest.submit.step.CollectionStep</processing-class>
<type>collection</type>
</step-definition>
<step-definition id="test-always-hidden" mandatory="true">
<heading></heading>
<processing-class>org.dspace.app.rest.submit.step.CollectionStep</processing-class>
<type>collection</type>
<scope visibility="hidden" visibilityOutside="hidden">submission</scope>
</step-definition>
</step-definitions>
<!-- The submission-definitions map lays out the detailed definition of -->
@@ -222,6 +250,13 @@
<step id="notDiscoverable"/>
</submission-process>
<submission-process name="test-hidden">
<step id="test-outside-workflow-hidden"/>
<step id="test-outside-submission-hidden"/>
<step id="test-never-hidden"/>
<step id="test-always-hidden"/>
</submission-process>
</submission-definitions>
</item-submission>

View File

@@ -436,6 +436,35 @@ it, please enter the types and the actual numbers or codes.</hint>
</field>
</row>
</form>
<form name="test-outside-workflow-hidden">
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>title</dc-element>
<dc-qualifier></dc-qualifier>
<repeatable>false</repeatable>
<label>Title</label>
<input-type>onebox</input-type>
<required>Field required</required>
</field>
</row>
</form>
<form name="test-outside-submission-hidden">
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>type</dc-element>
<dc-qualifier></dc-qualifier>
<repeatable>false</repeatable>
<label>Type</label>
<input-type>onebox</input-type>
<required>Field required</required>
</field>
</row>
</form>
</form-definitions>

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemimport;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.nio.file.Files;
@@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.flywaydb.core.internal.util.ExceptionUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -46,6 +48,7 @@ import org.junit.Test;
public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private static final String ZIP_NAME = "saf.zip";
private static final String PDF_NAME = "test.pdf";
private static final String publicationTitle = "A Tale of Two Cities";
private static final String personTitle = "Person Test";
@@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private Collection collection;
private Path tempDir;
private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before
@Override
@@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkMetadata();
checkMetadataWithAnotherSchema();
checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
}
@Test
@@ -254,6 +262,23 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkRelationship();
}
@Test
public void importItemByZipSafInvalidMimetype() throws Exception {
// use sample PDF file
Files.copy(getClass().getResourceAsStream("test.pdf"),
Path.of(tempDir.toString() + "/" + PDF_NAME));
String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(),
"-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString()
+ "/mapfile.out" };
try {
perfomImportScript(args);
} catch (Exception e) {
// should throw an exception due to invalid mimetype
assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass());
}
}
@Test
public void resumeImportItemBySafWithMetadataOnly() throws Exception {
// create simple SAF

View File

@@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
private Community community;
protected CommunityBuilder(Context context) {
super(context);
}
public static CommunityBuilder createCommunity(final Context context) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.create();
return builder.create(null);
}
public static CommunityBuilder createCommunity(final Context context, String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.create(handle);
}
private CommunityBuilder create() {
return createSubCommunity(context, null);
private CommunityBuilder create(String handle) {
return createSubCommunity(context, null, handle);
}
public static CommunityBuilder createSubCommunity(final Context context, final Community parent) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent);
return builder.createSub(parent, null);
}
private CommunityBuilder createSub(final Community parent) {
public static CommunityBuilder createSubCommunity(final Context context, final Community parent,
final String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent, handle);
}
private CommunityBuilder createSub(final Community parent, String handle) {
try {
community = communityService.create(parent, context);
community = communityService.create(parent, context, handle);
} catch (Exception e) {
e.printStackTrace();
return null;
@@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
@Override
public Community build() {
try {
communityService.update(context, community);
context.dispatchEvents();

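A minimal test-code sketch of the new handle-aware overloads above; the handle value is illustrative, and `context` is assumed to come from the integration-test base class.

// Create a community with a predetermined handle, then a sub-community with a
// generated one (passing null delegates handle assignment, as before).
Community parent = CommunityBuilder.createCommunity(context, "123456789/test-top").build();
Community child = CommunityBuilder.createSubCommunity(context, parent, null).build();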
View File

@@ -189,6 +189,10 @@ public class WorkspaceItemBuilder extends AbstractBuilder<WorkspaceItem, Workspa
return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject);
}
public WorkspaceItemBuilder withType(final String type) {
return addMetadataValue(MetadataSchemaEnum.DC.getName(),"type", null, type);
}
public WorkspaceItemBuilder grantLicense() {
Item item = workspaceItem.getItem();
String license;

View File

@@ -725,9 +725,6 @@ public class CollectionTest extends AbstractDSpaceObjectTest {
// Allow Item REMOVE perms
doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE));
// Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion)
doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE));
// create & add item first
context.turnOffAuthorisationSystem();

View File

@@ -1189,8 +1189,6 @@ public class ItemTest extends AbstractDSpaceObjectTest {
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true);
// Allow Item DELETE perms
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE);
// Allow Item WRITE perms (required to first delete identifiers)
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE);
UUID id = item.getID();
itemService.delete(context, item);

View File

@@ -7,14 +7,18 @@
*/
package org.dspace.discovery;
import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase;
@@ -24,6 +28,7 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.WorkflowItemBuilder;
@@ -39,6 +44,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.IndexableClaimedTask;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableItem;
@@ -731,6 +738,64 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
}
}
/**
 * Test designed to check that the default sort option for Discovery is working, using the <code>workspace</code>
 * DiscoveryConfiguration.<br/>
 * <b>Note</b>: this test will be skipped if <code>workspace</code> does not have a default sort option set, of
 * metadataType <code>dc_date_accessioned</code> or <code>lastModified</code>.
* @throws SearchServiceException
*/
@Test
public void searchWithDefaultSortServiceTest() throws SearchServiceException {
DiscoveryConfiguration workspaceConf =
SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null);
// Skip if no default sort option set for workspaceConf
if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) {
return;
}
DiscoverySortFieldConfiguration defaultSortField =
workspaceConf.getSearchSortConfiguration().getDefaultSortField();
// Populate the testing objects: create items in eperson's workspace and perform search in it
int numberItems = 10;
context.turnOffAuthorisationSystem();
EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build();
context.setCurrentUser(submitter);
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItems; i++) {
ItemBuilder.createItem(context, collection)
.withTitle("item " + i)
.build();
}
context.restoreAuthSystemState();
// Build query with default parameters (except for workspaceConf)
DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder()
.buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null,
null,null);
DiscoverResult result = searchService.search(context, discoverQuery);
/*
// code example for testing against sort by dc_date_accessioned
LinkedList<String> dc_date_accesioneds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getMetadata())
.map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned"))
.map(m -> m.getValue()).findFirst().orElse("")
)
.collect(Collectors.toCollection(LinkedList::new));
*/
LinkedList<String> lastModifieds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getLastModified().toString())
.collect(Collectors.toCollection(LinkedList::new));
assertFalse(lastModifieds.isEmpty());
for (int i = 0; i < lastModifieds.size() - 1; i++) {
assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0);
}
}
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1);
}

View File

@@ -0,0 +1,34 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
import static org.junit.Assert.assertEquals;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;
import org.junit.Test;
/**
* Test {@link TimeHelpers}.
* @author Mark H. Wood <mwood@iupui.edu>
*/
public class TimeHelpersTest {
/**
* Test of toMidnightUTC method, of class TimeHelpers.
*/
@Test
public void testToMidnightUTC() {
System.out.println("toMidnightUTC");
Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant());
Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant());
Date result = TimeHelpers.toMidnightUTC(from);
assertEquals(expResult, result);
}
}

View File

@@ -118,7 +118,8 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
}
/**
* Constructs a solr search URL.
* Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2.
* https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2
*
* @param query the search terms
* @param manifestId the id of the manifest in which to search
@@ -132,8 +133,9 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
solrQuery.set("hl.ocr.fl", "ocr_text");
solrQuery.set("hl.ocr.contextBlock", "line");
solrQuery.set("hl.ocr.contextSize", "2");
solrQuery.set("hl.snippets", "10");
solrQuery.set("hl.ocr.trackPages", "off");
solrQuery.set("hl.snippets", "8192");
solrQuery.set("hl.ocr.maxPassages", "8192");
solrQuery.set("hl.ocr.trackPages", "on");
solrQuery.set("hl.ocr.limitBlock","page");
solrQuery.set("hl.ocr.absoluteHighlights", "true");

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.xoai.app.plugins;
import java.sql.SQLException;
import java.util.List;
import com.lyncode.xoai.dataprovider.xml.xoai.Element;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin;
import org.dspace.xoai.util.ItemUtils;
/**
* AccessStatusElementItemCompilePlugin aims to add structured information about the
* Access Status of the item (if any).
 * The xoai document will be enriched with a structure like the following:
* <pre>
* {@code
* <element name="others">
* <element name="access-status">
* <field name="value">open.access</field>
* </element>
* </element>
* }
* </pre>
 * Returned values are based on:
* @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper
*/
public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin {
@Override
public Metadata additionalMetadata(Context context, Metadata metadata, Item item) {
AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService();
try {
String accessStatusType;
accessStatusType = accessStatusService.getAccessStatus(context, item);
Element accessStatus = ItemUtils.create("access-status");
accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType));
Element others;
List<Element> elements = metadata.getElement();
if (ItemUtils.getElement(elements, "others") != null) {
others = ItemUtils.getElement(elements, "others");
} else {
others = ItemUtils.create("others");
}
others.getElement().add(accessStatus);
} catch (SQLException e) {
e.printStackTrace();
}
return metadata;
}
}
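For reference, the enrichment structure from the javadoc can be rebuilt with the same ItemUtils helpers the plugin uses; "open.access" is the illustrative status value from the javadoc.

Element accessStatus = ItemUtils.create("access-status");
accessStatus.getField().add(ItemUtils.createValue("value", "open.access"));
Element others = ItemUtils.create("others");
others.getElement().add(accessStatus); // yields <element name="others"><element name="access-status">...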

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.repository.BitstreamRestRepository;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* REST controller for handling bulk updates to Bitstream resources.
* <p>
* This controller is responsible for handling requests to the bitstream category, which allows for updating
* multiple bitstream resources in a single operation.
* </p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
@RestController
@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME)
public class BitstreamCategoryRestController {
@Autowired
BitstreamRestRepository bitstreamRestRepository;
/**
* Handles PATCH requests to the bitstream category for bulk updates of bitstream resources.
*
* @param request the HTTP request object.
* @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied.
* @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a
* HTTP 204 No Content response since currently only a delete operation is supported.
* @throws SQLException if an error occurs while accessing the database.
* @throws AuthorizeException if the user is not authorized to perform the requested operation.
*/
@RequestMapping(method = RequestMethod.PATCH)
public ResponseEntity<RepresentationModel<?>> patch(HttpServletRequest request,
@RequestBody(required = true) JsonNode jsonNode)
throws SQLException, AuthorizeException {
Context context = obtainContext(request);
bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode);
return ResponseEntity.noContent().build();
}
}
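A hypothetical integration-test sketch of the request this controller accepts. The remove-operation body shape is an assumption based on the delete-only support described in the javadoc, the UUID is illustrative, and `mockMvc` plus the static MockMvc imports are assumed test fixtures.

// Assumed static imports: MockMvcRequestBuilders.patch, MockMvcResultMatchers.status
String body = "[{\"op\": \"remove\", \"path\": \"/bitstreams/1911e8a4-6939-490c-b58b-a5d70f8d91fb\"}]";
mockMvc.perform(patch("/api/core/bitstreams")
        .content(body)
        .contentType(MediaType.APPLICATION_JSON))
    .andExpect(status().isNoContent()); // the controller returns 204 No Content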

View File

@@ -176,7 +176,7 @@ public class OpenSearchController {
if (dsoObject != null) {
container = scopeResolver.resolveScope(context, dsoObject);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso("site", container);
.getDiscoveryConfiguration(context, container);
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray(

View File

@@ -86,6 +86,10 @@ public abstract class AInprogressItemConverter<T extends InProgressSubmission,
for (SubmissionSectionRest sections : def.getPanels()) {
SubmissionStepConfig stepConfig = submissionSectionConverter.toModel(sections);
if (stepConfig.isHiddenForInProgressSubmission(obj)) {
continue;
}
/*
* First, load the step processing class (using the current
* class loader)

View File

@@ -7,12 +7,17 @@
*/
package org.dspace.app.rest.converter;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import java.util.ArrayList;
import java.util.List;
import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.browse.BrowseIndex;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.sort.SortException;
import org.dspace.sort.SortOption;
import org.springframework.stereotype.Component;
@@ -30,18 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
public BrowseIndexRest convert(BrowseIndex obj, Projection projection) {
BrowseIndexRest bir = new BrowseIndexRest();
bir.setProjection(projection);
bir.setId(obj.getName());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setMetadataBrowse(obj.isMetadataIndex());
List<String> metadataList = new ArrayList<String>();
if (obj.isMetadataIndex()) {
String id = obj.getName();
if (obj instanceof DSpaceControlledVocabularyIndex) {
DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj;
metadataList = new ArrayList<>(vocObj.getMetadataFields());
id = vocObj.getVocabulary().getPluginInstanceName();
bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName());
bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName());
bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL);
} else if (obj.isMetadataIndex()) {
for (String s : obj.getMetadata().split(",")) {
metadataList.add(s.trim());
}
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_VALUE_LIST);
} else {
metadataList.add(obj.getSortOption().getMetadata());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_FLAT);
}
bir.setId(id);
bir.setMetadataList(metadataList);
List<BrowseIndexRest.SortOption> sortOptionsList = new ArrayList<BrowseIndexRest.SortOption>();
@@ -52,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
} catch (SortException e) {
throw new RuntimeException(e.getMessage(), e);
}
bir.setSortOptions(sortOptionsList);
if (!bir.getBrowseType().equals(BROWSE_TYPE_HIERARCHICAL)) {
bir.setSortOptions(sortOptionsList);
}
return bir;
}
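A sketch of the three shapes the converter above can produce; `converter` and `browseIndex` are assumed local variables, and `Projection.DEFAULT` is assumed as the projection.

BrowseIndexRest rest = converter.convert(browseIndex, Projection.DEFAULT);
switch (rest.getBrowseType()) {
    case BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL: // id = vocabulary plugin name; no sort options
        assert rest.getVocabulary() != null && rest.getSortOptions() == null;
        break;
    case BrowseIndexRest.BROWSE_TYPE_VALUE_LIST:   // two-level metadata browse (e.g. author)
    case BrowseIndexRest.BROWSE_TYPE_FLAT:         // single-level item browse (e.g. title)
        assert rest.getSortOptions() != null;
        break;
    default:
        break;
}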

View File

@@ -80,6 +80,15 @@ public class DiscoverConfigurationConverter
sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name());
searchConfigurationRest.addSortOption(sortOption);
}
DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField();
if (defaultSortField != null) {
SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption();
sortOption.setName(defaultSortField.getMetadataField());
sortOption.setActualName(defaultSortField.getType());
sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name());
searchConfigurationRest.setDefaultSortOption(sortOption);
}
}
}

View File

@@ -6,7 +6,9 @@
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.converter;
import java.text.ParseException;
import java.util.Date;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.AccessConditionOptionRest;
@@ -15,6 +17,7 @@ import org.dspace.app.rest.projection.Projection;
import org.dspace.submit.model.AccessConditionConfiguration;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.util.DateMathParser;
import org.dspace.util.TimeHelpers;
import org.springframework.stereotype.Component;
/**
@@ -27,21 +30,21 @@ import org.springframework.stereotype.Component;
public class SubmissionAccessOptionConverter
implements DSpaceConverter<AccessConditionConfiguration, SubmissionAccessOptionRest> {
DateMathParser dateMathParser = new DateMathParser();
@Override
public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, Projection projection) {
SubmissionAccessOptionRest model = new SubmissionAccessOptionRest();
model.setId(config.getName());
model.setCanChangeDiscoverable(config.getCanChangeDiscoverable());
model.setProjection(projection);
DateMathParser dateMathParser = new DateMathParser();
for (AccessConditionOption option : config.getOptions()) {
AccessConditionOptionRest optionRest = new AccessConditionOptionRest();
optionRest.setHasStartDate(option.getHasStartDate());
optionRest.setHasEndDate(option.getHasEndDate());
if (StringUtils.isNotBlank(option.getStartDateLimit())) {
try {
optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit()));
Date requested = dateMathParser.parseMath(option.getStartDateLimit());
optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested));
} catch (ParseException e) {
throw new IllegalStateException("Wrong start date limit configuration for the access condition "
+ "option named " + option.getName());
@@ -49,7 +52,8 @@ public class SubmissionAccessOptionConverter
}
if (StringUtils.isNotBlank(option.getEndDateLimit())) {
try {
optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit()));
Date requested = dateMathParser.parseMath(option.getEndDateLimit());
optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested));
} catch (ParseException e) {
throw new IllegalStateException("Wrong end date limit configuration for the access condition "
+ "option named " + option.getName());

View File

@@ -174,6 +174,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
GroupNameNotProvidedException.class,
GroupHasPendingWorkflowTasksException.class,
PasswordNotValidException.class,
RESTBitstreamNotFoundException.class
})
protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
TranslatableException ex) throws IOException {

View File

@@ -0,0 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.exception;
import java.text.MessageFormat;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
/**
* <p>Extend {@link UnprocessableEntityException} to provide a specific error message
* in the REST response. The error message is added to the response in
* {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException},
* hence it should not contain sensitive or security-compromising info.</p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException {
public static String uuid;
/**
* @param formatStr string with placeholders, ideally obtained using {@link I18nUtil}
* @return message with bitstream id substituted
*/
private static String formatMessage(String formatStr) {
MessageFormat fmt = new MessageFormat(formatStr);
return fmt.format(new String[]{uuid});
}
public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message";
public RESTBitstreamNotFoundException(String uuid) {
super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY)));
RESTBitstreamNotFoundException.uuid = uuid;
}
public String getMessageKey() {
return MESSAGE_KEY;
}
public String getLocalizedMessage(Context context) {
return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context));
}
}
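For reference, a sketch of how the {0} placeholder in the message key resolves, using the message text from the properties file earlier in this changeset; the UUID is illustrative.

MessageFormat fmt = new MessageFormat(
        "Bitstream with uuid {0} could not be found in the repository");
String message = fmt.format(new String[] { "1911e8a4-6939-490c-b58b-a5d70f8d91fb" });
// message: the UUID above substituted for {0}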

View File

@@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory<BrowseEntryResourc
UriComponentsBuilder baseLink = uriBuilder(
getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(),
English.plural(bix.getType()), bix.getId(),
BrowseIndexRest.ITEMS, null, null));
BrowseIndexRest.LINK_ITEMS, null, null));
addFilterParams(baseLink, data);
list.add(buildLink(BrowseIndexRest.ITEMS,
list.add(buildLink(BrowseIndexRest.LINK_ITEMS,
baseLink.build().encode().toUriString()));
}
}

View File

@@ -10,6 +10,7 @@ package org.dspace.app.rest.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.dspace.app.rest.RestResourceController;
@@ -20,11 +21,11 @@ import org.dspace.app.rest.RestResourceController;
*/
@LinksRest(links = {
@LinkRest(
name = BrowseIndexRest.ITEMS,
name = BrowseIndexRest.LINK_ITEMS,
method = "listBrowseItems"
),
@LinkRest(
name = BrowseIndexRest.ENTRIES,
name = BrowseIndexRest.LINK_ENTRIES,
method = "listBrowseEntries"
)
})
@@ -35,20 +36,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
public static final String CATEGORY = RestAddressableModel.DISCOVER;
public static final String ITEMS = "items";
public static final String ENTRIES = "entries";
public static final String LINK_ITEMS = "items";
public static final String LINK_ENTRIES = "entries";
public static final String LINK_VOCABULARY = "vocabulary";
boolean metadataBrowse;
// if the browse index has two levels, the 1st level shows the list of entries like author names, subjects,
// types, etc.; the second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_VALUE_LIST = "valueList";
// if the browse index has one level: the full list of items
public static final String BROWSE_TYPE_FLAT = "flatBrowse";
// if the browse index should display the vocabulary tree. The 1st level shows the tree.
// The second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse";
// Shared fields
String browseType;
@JsonProperty(value = "metadata")
List<String> metadataList;
// Single browse index fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String dataType;
@JsonInclude(JsonInclude.Include.NON_NULL)
List<SortOption> sortOptions;
@JsonInclude(JsonInclude.Include.NON_NULL)
String order;
// Hierarchical browse fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String facetType;
@JsonInclude(JsonInclude.Include.NON_NULL)
String vocabulary;
@JsonIgnore
@Override
public String getCategory() {
@@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
return NAME;
}
public boolean isMetadataBrowse() {
return metadataBrowse;
}
public void setMetadataBrowse(boolean metadataBrowse) {
this.metadataBrowse = metadataBrowse;
}
public List<String> getMetadataList() {
return metadataList;
}
@@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
this.sortOptions = sortOptions;
}
/**
 * - valueList => the browse index has two levels: the 1st level shows the list of entries like author names,
 *   subjects, types, etc.; the second level is the actual list of items linked to a specific entry
 * - flatBrowse => the browse index has one level: the full list of items
 * - hierarchicalBrowse => the browse index displays a vocabulary tree: the 1st level shows the tree;
 *   the second level is the actual list of items linked to a specific entry
*/
public void setBrowseType(String browseType) {
this.browseType = browseType;
}
public String getBrowseType() {
return browseType;
}
public void setFacetType(String facetType) {
this.facetType = facetType;
}
public String getFacetType() {
return facetType;
}
public void setVocabulary(String vocabulary) {
this.vocabulary = vocabulary;
}
public String getVocabulary() {
return vocabulary;
}
@Override
public Class getController() {
return RestResourceController.class;

View File

@@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
private List<Filter> filters = new LinkedList<>();
private List<SortOption> sortOptions = new LinkedList<>();
private SortOption defaultSortOption;
public String getCategory() {
return CATEGORY;
}
@@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
return sortOptions;
}
public SortOption getDefaultSortOption() {
return defaultSortOption;
}
public void setDefaultSortOption(SortOption defaultSortOption) {
this.defaultSortOption = defaultSortOption;
}
@Override
public boolean equals(Object object) {
return (object instanceof SearchConfigurationRest &&

View File

@@ -7,9 +7,20 @@
*/
package org.dspace.app.rest.model.hateoas;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import org.atteo.evo.inflector.English;
import org.dspace.app.rest.RestResourceController;
import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.model.VocabularyRest;
import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource;
import org.dspace.app.rest.utils.Utils;
import org.dspace.content.authority.ChoiceAuthority;
import org.dspace.content.authority.factory.ContentAuthorityServiceFactory;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.springframework.hateoas.Link;
import org.springframework.web.util.UriComponentsBuilder;
/**
* Browse Index Rest HAL Resource. The HAL Resource wraps the REST Resource
@@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils;
*/
@RelNameDSpaceResource(BrowseIndexRest.NAME)
public class BrowseIndexResource extends DSpaceResource<BrowseIndexRest> {
public BrowseIndexResource(BrowseIndexRest bix, Utils utils) {
super(bix, utils);
// TODO: the following code will force the embedding of items and
// entries in the browseIndex. We need to find a way to populate the rels
// array from the request/projection; right now it is always null.
// super(bix, utils, "items", "entries");
if (bix.isMetadataBrowse()) {
add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES));
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES));
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) {
ChoiceAuthorityService choiceAuthorityService =
ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService();
ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary());
UriComponentsBuilder baseLink = linkTo(
methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null,
null, VocabularyRest.CATEGORY,
English.plural(VocabularyRest.NAME), source.getPluginInstanceName(),
"", null, null)).toUriComponentsBuilder();
add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY));
}
add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS));
}
}

View File

@@ -15,9 +15,12 @@ import java.util.List;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.JsonPatchConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
@@ -38,6 +41,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -72,6 +76,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
@Autowired
private HandleService handleService;
@Autowired
ConfigurationService configurationService;
@Autowired
public BitstreamRestRepository(BitstreamService dsoService) {
super(dsoService);
@@ -248,4 +255,25 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
return converter.toRest(targetBundle, utils.obtainProjection());
}
/**
* Method that will transform the provided PATCH json body into a list of operations.
* The operations will be handled by a supporting class resolved by the
* {@link org.dspace.app.rest.repository.patch.ResourcePatch#patch} method.
*
* @param context The context
* @param jsonNode the json body provided from the request body
*/
public void patchBitstreamsInBulk(Context context, JsonNode jsonNode) throws SQLException {
int operationsLimit = configurationService.getIntProperty("rest.patch.operations.limit", 1000);
ObjectMapper mapper = new ObjectMapper();
JsonPatchConverter patchConverter = new JsonPatchConverter(mapper);
Patch patch = patchConverter.convert(jsonNode);
if (patch.getOperations().size() > operationsLimit) {
throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " +
operationsLimit);
}
resourcePatch.patch(obtainContext(), null, patch.getOperations());
context.commit();
}
}
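A sketch of the conversion step inside patchBitstreamsInBulk: a raw JSON body becomes typed operations before being dispatched. Checked-exception handling is omitted, and the UUID is illustrative.

ObjectMapper mapper = new ObjectMapper();
JsonNode body = mapper.readTree(
        "[{\"op\": \"remove\", \"path\": \"/bitstreams/1911e8a4-6939-490c-b58b-a5d70f8d91fb\"}]");
Patch patch = new JsonPatchConverter(mapper).convert(body);
// patch.getOperations() now holds one remove operation, handled by resourcePatch.patch(...)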

View File

@@ -40,7 +40,7 @@ import org.springframework.stereotype.Component;
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES)
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ENTRIES)
public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository {
@@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
@Override
public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data;
if (bir.isMetadataBrowse() && "entries".equals(name)) {
if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) &&
name.equals(BrowseIndexRest.LINK_ENTRIES)) {
return true;
}
return false;

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -17,7 +18,10 @@ import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex;
import org.dspace.browse.CrossLinks;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
@@ -31,26 +35,48 @@ import org.springframework.stereotype.Component;
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME)
public class BrowseIndexRestRepository extends DSpaceRestRepository<BrowseIndexRest, String> {
@Autowired
private ChoiceAuthorityService choiceAuthorityService;
@Override
@PreAuthorize("permitAll()")
public BrowseIndexRest findOne(Context context, String name) {
BrowseIndexRest bi = null;
BrowseIndexRest bi = createFromMatchingBrowseIndex(name);
if (bi == null) {
bi = createFromMatchingVocabulary(name);
}
return bi;
}
private BrowseIndexRest createFromMatchingVocabulary(String name) {
DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name);
if (vocabularyIndex != null) {
return converter.toRest(vocabularyIndex, utils.obtainProjection());
}
return null;
}
private BrowseIndexRest createFromMatchingBrowseIndex(String name) {
BrowseIndex bix;
try {
bix = BrowseIndex.getBrowseIndex(name);
bix = BrowseIndex.getBrowseIndex(name);
} catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e);
}
if (bix != null) {
bi = converter.toRest(bix, utils.obtainProjection());
return converter.toRest(bix, utils.obtainProjection());
}
return bi;
return null;
}
@Override
public Page<BrowseIndexRest> findAll(Context context, Pageable pageable) {
try {
List<BrowseIndex> indexes = Arrays.asList(BrowseIndex.getBrowseIndices());
List<BrowseIndex> indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices()));
choiceAuthorityService.getChoiceAuthoritiesNames()
.stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null)
.forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name)));
return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection());
} catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e);

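A sketch of the resolution order findOne now implements; "author" and "srsc" are illustrative names, assuming the first is a configured browse index and the second a controlled-vocabulary index.

BrowseIndexRest byIndex = findOne(context, "author"); // matched by BrowseIndex.getBrowseIndex(...)
BrowseIndexRest byVocab = findOne(context, "srsc");   // falls back to choiceAuthorityService.getVocabularyIndex(...)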
View File

@@ -42,7 +42,7 @@ import org.springframework.stereotype.Component;
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS)
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS)
public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository {
@@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
@Override
public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data;
if (!bir.isMetadataBrowse() && "items".equals(name)) {
if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) &&
name.equals(BrowseIndexRest.LINK_ITEMS)) {
return true;
}
return false;

View File

@@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
}
@@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Context context = obtainContext();
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;
@@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration);
}
@@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName);
@@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Pageable page = PageRequest.of(1, 1);
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject);
.getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null;

View File

@@ -14,6 +14,7 @@ import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
@@ -45,10 +46,10 @@ import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
* This is the repository responsible to manage MetadataField Rest object
*
@@ -135,13 +136,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
@Parameter(value = "exactName", required = false) String exactName,
Pageable pageable) throws SQLException {
Context context = obtainContext();
long totalElements = 0;
List<MetadataField> matchingMetadataFields = new ArrayList<>();
if (StringUtils.isBlank(exactName)) {
// Find matches in Solr Search core
DiscoverQuery discoverQuery =
this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query);
this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable);
try {
DiscoverResult searchResult = searchService.search(context, null, discoverQuery);
for (IndexableObject object : searchResult.getIndexableObjects()) {
@@ -149,6 +151,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
matchingMetadataFields.add(((IndexableMetadataField) object).getIndexedObject());
}
}
totalElements = searchResult.getTotalSearchResults();
} catch (SearchServiceException e) {
log.error("Error while searching with Discovery", e);
throw new IllegalArgumentException("Error while searching with Discovery: " + e.getMessage());
@@ -163,10 +166,11 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
MetadataField exactMatchingMdField = metadataFieldService.findByString(context, exactName, '.');
if (exactMatchingMdField != null) {
matchingMetadataFields.add(exactMatchingMdField);
totalElements = 1;
}
}
return converter.toRestPage(matchingMetadataFields, pageable, utils.obtainProjection());
return converter.toRestPage(matchingMetadataFields, pageable, totalElements, utils.obtainProjection());
}
/**
@@ -182,7 +186,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
* @throws SQLException If DB error
*/
private DiscoverQuery createDiscoverQuery(Context context, String schemaName, String elementName,
String qualifierName, String query) throws SQLException {
String qualifierName, String query, Pageable pageable) throws SQLException {
List<String> filterQueries = new ArrayList<>();
if (StringUtils.isNotBlank(query)) {
if (query.split("\\.").length > 3) {
@@ -210,6 +214,15 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.addFilterQueries(filterQueries.toArray(new String[filterQueries.size()]));
Iterator<Sort.Order> orderIterator = pageable.getSort().iterator();
if (orderIterator.hasNext()) {
Sort.Order order = orderIterator.next();
discoverQuery.setSortField(order.getProperty() + "_sort",
order.getDirection() == Sort.Direction.ASC ? DiscoverQuery.SORT_ORDER.asc :
DiscoverQuery.SORT_ORDER.desc);
}
discoverQuery.setStart(Math.toIntExact(pageable.getOffset()));
discoverQuery.setMaxResults(pageable.getPageSize());
return discoverQuery;
}
@@ -247,10 +260,18 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
if (isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
} else if (!metadataFieldRest.getElement().matches("^[^. ,]{1,64}$")) {
throw new UnprocessableEntityException(
"metadata element (in request body) cannot contain dots, commas or spaces and should be smaller than" +
" 64 characters");
}
if (isBlank(metadataFieldRest.getQualifier())) {
metadataFieldRest.setQualifier(null);
} else if (!metadataFieldRest.getQualifier().matches("^[^. ,]{1,64}$")) {
throw new UnprocessableEntityException(
"metadata qualifier (in request body) cannot contain dots, commas or spaces and should be smaller" +
" than 64 characters");
}
// create
@@ -300,24 +321,26 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
try {
metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class);
} catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e);
throw new DSpaceBadRequestException("Cannot parse JSON in request body", e);
}
if (metadataFieldRest == null || isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
MetadataField metadataField = metadataFieldService.find(context, id);
if (metadataField == null) {
throw new UnprocessableEntityException("metadata field with id: " + id + " not found");
}
if (!Objects.equals(metadataFieldRest.getElement(), metadataField.getElement())) {
throw new UnprocessableEntityException("Metadata element cannot be updated.");
}
if (!Objects.equals(metadataFieldRest.getQualifier(), metadataField.getQualifier())) {
throw new UnprocessableEntityException("Metadata qualifier cannot be updated.");
}
if (!Objects.equals(id, metadataFieldRest.getId())) {
throw new UnprocessableEntityException("ID in request body doesn't match path ID");
}
MetadataField metadataField = metadataFieldService.find(context, id);
if (metadataField == null) {
throw new ResourceNotFoundException("metadata field with id: " + id + " not found");
}
metadataField.setElement(metadataFieldRest.getElement());
metadataField.setQualifier(metadataFieldRest.getQualifier());
metadataField.setScopeNote(metadataFieldRest.getScopeNote());
try {

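A sketch of the element/qualifier rule enforced above: no dots, commas or spaces, and at most 64 characters; the field values are illustrative.

String rule = "^[^. ,]{1,64}$";
boolean ok = "contributor".matches(rule);          // true
boolean bad = "contributor.author".matches(rule);  // false: contains a dot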
View File

@@ -93,6 +93,10 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
// validate fields
if (isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank");
} else if (!metadataSchemaRest.getPrefix().matches("^[^. ,]{1,32}$")) {
throw new UnprocessableEntityException(
"metadata schema namespace cannot contain dots, commas or spaces and should be smaller than" +
" 32 characters");
}
if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
@@ -142,11 +146,16 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
try {
metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class);
} catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e);
throw new DSpaceBadRequestException("Cannot parse JSON in request body", e);
}
if (metadataSchemaRest == null || isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank");
MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
if (!Objects.equals(metadataSchemaRest.getPrefix(), metadataSchema.getName())) {
throw new UnprocessableEntityException("Metadata schema name cannot be updated.");
}
if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
@@ -156,12 +165,6 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
throw new UnprocessableEntityException("ID in request doesn't match path ID");
}
MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
metadataSchema.setName(metadataSchemaRest.getPrefix());
metadataSchema.setNamespace(metadataSchemaRest.getNamespace());
try {

View File

@@ -10,6 +10,7 @@ package org.dspace.app.rest.repository;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
@@ -18,11 +19,11 @@ import org.dspace.app.rest.model.AccessConditionOptionRest;
import org.dspace.app.rest.model.SubmissionUploadRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.core.Context;
import org.dspace.eperson.service.GroupService;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.submit.model.UploadConfiguration;
import org.dspace.submit.model.UploadConfigurationService;
import org.dspace.util.DateMathParser;
import org.dspace.util.TimeHelpers;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -47,11 +48,6 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository<Submiss
@Autowired
private UploadConfigurationService uploadConfigurationService;
@Autowired
GroupService groupService;
DateMathParser dateMathParser = new DateMathParser();
@PreAuthorize("hasAuthority('AUTHENTICATED')")
@Override
public SubmissionUploadRest findOne(Context context, String submitName) {
@@ -70,7 +66,7 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository<Submiss
Collection<UploadConfiguration> uploadConfigs = uploadConfigurationService.getMap().values();
Projection projection = utils.obtainProjection();
List<SubmissionUploadRest> results = new ArrayList<>();
List<String> configNames = new ArrayList<String>();
List<String> configNames = new ArrayList<>();
for (UploadConfiguration uploadConfig : uploadConfigs) {
if (!configNames.contains(uploadConfig.getName())) {
configNames.add(uploadConfig.getName());
@@ -92,13 +88,15 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository<Submiss
private SubmissionUploadRest convert(Context context, UploadConfiguration config, Projection projection) {
SubmissionUploadRest result = new SubmissionUploadRest();
result.setProjection(projection);
DateMathParser dateMathParser = new DateMathParser();
for (AccessConditionOption option : config.getOptions()) {
AccessConditionOptionRest optionRest = new AccessConditionOptionRest();
optionRest.setHasStartDate(option.getHasStartDate());
optionRest.setHasEndDate(option.getHasEndDate());
if (StringUtils.isNotBlank(option.getStartDateLimit())) {
try {
optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit()));
Date requested = dateMathParser.parseMath(option.getStartDateLimit());
optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested));
} catch (ParseException e) {
throw new IllegalStateException("Wrong start date limit configuration for the access condition "
+ "option named " + option.getName());
@@ -106,7 +104,8 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository<Submiss
}
if (StringUtils.isNotBlank(option.getEndDateLimit())) {
try {
optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit()));
Date requested = dateMathParser.parseMath(option.getEndDateLimit());
optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested));
} catch (ParseException e) {
throw new IllegalStateException("Wrong end date limit configuration for the access condition "
+ "option named " + option.getName());

View File

@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository.patch.operation;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.rest.exception.RESTBitstreamNotFoundException;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.stereotype.Component;
/**
* A PATCH operation for removing bitstreams in bulk from the repository.
*
* Example: <code>
* curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json"
* -d '[
* {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"},
* {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"},
* {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"}
* ]'
* </code>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
@Component
public class BitstreamRemoveOperation extends PatchOperation<Bitstream> {
@Autowired
BitstreamService bitstreamService;
@Autowired
AuthorizeService authorizeService;
public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/";
@Override
public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException {
String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, "");
Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete));
if (bitstreamToDelete == null) {
throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete);
}
authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE);
try {
bitstreamService.delete(context, bitstreamToDelete);
} catch (AuthorizeException | IOException e) {
throw new RuntimeException(e.getMessage(), e);
}
return null;
}
@Override
public boolean supports(Object objectToMatch, Operation operation) {
return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) &&
operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE);
}
public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation)
throws SQLException {
try {
authorizeService.authorizeAction(context, bitstream, operation);
} catch (AuthorizeException e) {
throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e);
}
}
}
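
To make the expected request body concrete, here is a small hypothetical client-side helper that assembles the JSON Patch document shown in the javadoc above; the UUIDs are placeholders.

import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;

// Hypothetical builder for the bulk-remove patch body documented above.
public class BulkRemovePatchBodyDemo {
    static String build(List<UUID> bitstreamIds) {
        return bitstreamIds.stream()
            .map(id -> "{\"op\": \"remove\", \"path\": \"/bitstreams/" + id + "\"}")
            .collect(Collectors.joining(",", "[", "]"));
    }

    public static void main(String[] args) {
        // Prints e.g. [{"op": "remove", "path": "/bitstreams/<uuid1>"}, ...]
        System.out.println(build(List.of(UUID.randomUUID(), UUID.randomUUID())));
    }
}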

View File

@@ -6,9 +6,11 @@
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.submit.factory.impl;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
@@ -23,11 +25,12 @@ import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.submit.model.AccessConditionConfiguration;
import org.dspace.submit.model.AccessConditionConfigurationService;
import org.dspace.util.TimeHelpers;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Submission "add" operation to add custom resource policies.
*
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class AccessConditionAddPatchOperation extends AddPatchOperation<AccessConditionDTO> {
@@ -52,6 +55,18 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation<AccessCo
String[] absolutePath = getAbsolutePath(path).split("/");
List<AccessConditionDTO> accessConditions = parseAccessConditions(path, value, absolutePath);
// Clamp access condition dates to midnight UTC
for (AccessConditionDTO condition : accessConditions) {
Date date = condition.getStartDate();
if (null != date) {
condition.setStartDate(TimeHelpers.toMidnightUTC(date));
}
date = condition.getEndDate();
if (null != date) {
condition.setEndDate(TimeHelpers.toMidnightUTC(date));
}
}
verifyAccessConditions(context, configuration, accessConditions);
if (absolutePath.length == 1) {
@@ -65,7 +80,7 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation<AccessCo
}
private List<AccessConditionDTO> parseAccessConditions(String path, Object value, String[] split) {
List<AccessConditionDTO> accessConditions = new ArrayList<AccessConditionDTO>();
List<AccessConditionDTO> accessConditions = new ArrayList<>();
if (split.length == 1) {
accessConditions = evaluateArrayObject((LateObjectEvaluator) value);
} else if (split.length == 2) {
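
The TimeHelpers source itself is not part of this diff; the clamping loop above only needs a method that zeroes out the time-of-day in UTC, which could plausibly look like the following sketch (an assumption, not the actual DSpace implementation).

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;

// Plausible shape of TimeHelpers.toMidnightUTC, written here only for illustration.
public class TimeHelpersSketch {
    public static Date toMidnightUTC(Date from) {
        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        calendar.setTime(from);
        calendar.set(Calendar.HOUR_OF_DAY, 0); // zero out the time-of-day fields
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        return calendar.getTime();
    }
}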

View File

@@ -6,6 +6,7 @@
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.submit.factory.impl;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -29,6 +30,7 @@ import org.dspace.core.Context;
import org.dspace.submit.model.AccessConditionConfiguration;
import org.dspace.submit.model.AccessConditionConfigurationService;
import org.dspace.submit.model.AccessConditionOption;
import org.dspace.util.TimeHelpers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -106,7 +108,7 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation<
return null;
}
private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplare)
private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplace)
throws ParseException {
AccessConditionDTO accessCondition = new AccessConditionDTO();
accessCondition.setName(rpToReplace.getRpName());
@@ -114,13 +116,13 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation<
accessCondition.setEndDate(rpToReplace.getEndDate());
switch (attributeReplace) {
case "name":
accessCondition.setName(valueToReplare);
accessCondition.setName(valueToReplace);
return accessCondition;
case "startDate":
accessCondition.setStartDate(parseDate(valueToReplare));
accessCondition.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace)));
return accessCondition;
case "endDate":
accessCondition.setEndDate(parseDate(valueToReplare));
accessCondition.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace)));
return accessCondition;
default:
throw new UnprocessableEntityException("The provided attribute: "
@@ -128,17 +130,17 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation<
}
}
private void updatePolicy(Context context, String valueToReplare, String attributeReplace,
private void updatePolicy(Context context, String valueToReplace, String attributeReplace,
ResourcePolicy rpToReplace) throws SQLException, AuthorizeException {
switch (attributeReplace) {
case "name":
rpToReplace.setRpName(valueToReplare);
rpToReplace.setRpName(valueToReplace);
break;
case "startDate":
rpToReplace.setStartDate(parseDate(valueToReplare));
rpToReplace.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace)));
break;
case "endDate":
rpToReplace.setEndDate(parseDate(valueToReplare));
rpToReplace.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace)));
break;
default:
throw new IllegalArgumentException("Attribute to replace is not valid:" + attributeReplace);

View File

@@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
@@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
private Collection collection;
private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before
@Override
@@ -126,6 +128,10 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
checkMetadata();
checkMetadataWithAnotherSchema();
checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
}
@Test

View File

@@ -7,12 +7,16 @@
*/
package org.dspace.app.rest;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE;
import static org.dspace.core.Constants.WRITE;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
@@ -21,9 +25,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import javax.ws.rs.core.MediaType;
import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils;
@@ -33,6 +39,7 @@ import org.dspace.app.rest.matcher.BundleMatcher;
import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.RemoveOperation;
import org.dspace.app.rest.model.patch.ReplaceOperation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
@@ -41,6 +48,7 @@ import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream;
@@ -52,15 +60,20 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.MvcResult;
public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest {
@@ -79,6 +92,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
@Autowired
private ItemService itemService;
@Autowired
CollectionService collectionService;
@Autowired
CommunityService communityService;
@Test
public void findAllTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
@@ -2370,6 +2389,513 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
));
}
@Test
public void deleteBitstreamsInBulk() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
// Verify that only the three bitstreams were deleted and the fourth one still exists
Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3));
Assert.assertTrue(bitstreamExists(token, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_invalidUUID() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
// For the third bitstream, use an invalid UUID
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
UUID randomUUID = UUID.randomUUID();
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID);
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
MvcResult result = getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isUnprocessableEntity())
.andReturn();
// Verify our custom error message is returned when an invalid UUID is used
assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository",
result.getResponse().getErrorMessage());
// Verify that no bitstreams were deleted since the request was invalid
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
// But set the rest.patch.operations.limit property to 2, so that the request is invalid
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isBadRequest());
// Verify that no bitstreams were deleted since the request was invalid
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
}
@Test
public void deleteBitstreamsInBulk_Unauthorized() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(admin.getEmail(), password);
Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
getClient().perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isUnauthorized());
}
@Test
public void deleteBitstreamsInBulk_Forbidden() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection")
.build();
Item publicItem1 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, collection)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
}
@Test
public void deleteBitstreamsInBulk_collectionAdmin() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 1")
.build();
Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 2")
.build();
EPerson col1Admin = EPersonBuilder.createEPerson(context)
.withEmail("col1admin@test.com")
.withPassword(password)
.build();
EPerson col2Admin = EPersonBuilder.createEPerson(context)
.withEmail("col2admin@test.com")
.withPassword(password)
.build();
Group col1_AdminGroup = collectionService.createAdministrators(context, col1);
Group col2_AdminGroup = collectionService.createAdministrators(context, col2);
groupService.addMember(context, col1_AdminGroup, col1Admin);
groupService.addMember(context, col2_AdminGroup, col2Admin);
Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(col1Admin.getEmail(), password);
// Should return forbidden since one of the bitstreams does not originate from collection 1
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
// Remove the bitstream that does not originate from the collection we administer; the request should then succeed
ops.remove(2);
patchBody = getPatchContent(ops);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
// Change the token to the admin of collection 2
token = getAuthToken(col2Admin.getEmail(), password);
// Add three out of four bitstreams to the list of bitstreams to be deleted
ops = new ArrayList<>();
removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp1);
removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp2);
removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID());
ops.add(removeOp3);
patchBody = getPatchContent(ops);
// Should return forbidden since one of the bitstreams does not originate from collection 2
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isForbidden());
// Remove the bitstream that does not originate from the collection we administer; the request should then succeed
ops.remove(0);
patchBody = getPatchContent(ops);
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
}
@Test
public void deleteBitstreamsInBulk_communityAdmin() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 1")
.build();
Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Collection 2")
.build();
EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context)
.withEmail("parentComAdmin@test.com")
.withPassword(password)
.build();
Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity);
groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin);
Item publicItem1 = ItemBuilder.createItem(context, col1)
.withTitle("Test item 1")
.build();
Item publicItem2 = ItemBuilder.createItem(context, col2)
.withTitle("Test item 2")
.build();
String bitstreamContent = "This is an archived bitstream";
Bitstream bitstream1 = null;
Bitstream bitstream2 = null;
Bitstream bitstream3 = null;
Bitstream bitstream4 = null;
try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 1")
.withMimeType("text/plain")
.build();
bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
.withName("Bitstream 2")
.withMimeType("text/plain")
.build();
bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 3")
.withMimeType("text/plain")
.build();
bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
.withName("Bitstream 4")
.withMimeType("text/plain")
.build();
}
context.restoreAuthSystemState();
// Add three out of four bitstreams to the list of bitstreams to be deleted
List<Operation> ops = new ArrayList<>();
RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
ops.add(removeOp1);
RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
ops.add(removeOp2);
RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
ops.add(removeOp3);
String patchBody = getPatchContent(ops);
String token = getAuthToken(parentCommunityAdmin.getEmail(), password);
// Bitstreams originate from two different collections, but those collections live in the same community, so
// a community admin should be able to delete them
getClient(token).perform(patch("/api/core/bitstreams")
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isNoContent());
}
public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception {
for (Bitstream bitstream : bitstreams) {
if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID()))
.andReturn().getResponse().getStatus() != SC_OK) {
return false;
}
}
return true;
}
public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception {
for (Bitstream bitstream : bitstreams) {
if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID()))
.andReturn().getResponse().getStatus() != SC_NOT_FOUND) {
return false;
}
}
return true;
}
}

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -63,22 +64,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
//We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType))
//Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page
//Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page
// object
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(4)))
.andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.totalPages", is(1)))
.andExpect(jsonPath("$.page.number", is(0)))
//The array of browse index should have a size 4
.andExpect(jsonPath("$._embedded.browses", hasSize(4)))
//The array of browse index should have a size 5
.andExpect(jsonPath("$._embedded.browses", hasSize(5)))
//Check that all (and only) the default browse indexes are present
.andExpect(jsonPath("$._embedded.browses", containsInAnyOrder(
BrowseIndexMatcher.dateIssuedBrowseIndex("asc"),
BrowseIndexMatcher.contributorBrowseIndex("asc"),
BrowseIndexMatcher.titleBrowseIndex("asc"),
BrowseIndexMatcher.subjectBrowseIndex("asc")
BrowseIndexMatcher.subjectBrowseIndex("asc"),
BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")
)))
;
}
@@ -125,6 +127,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
;
}
@Test
public void findBrowseByVocabulary() throws Exception {
//Use srsc as this vocabulary is included by default
//When we call the root endpoint
getClient().perform(get("/api/discover/browses/srsc"))
//The status has to be 200 OK
.andExpect(status().isOk())
//We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType))
//Check that the JSON root matches the expected browse index
.andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")))
;
}
@Test
public void findBrowseBySubject() throws Exception {
//When we call the root endpoint
@@ -2142,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true)))
.andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)))
;
}
@@ -2159,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true)));
.andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)));
}
@Test

View File

@@ -153,6 +153,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums.add(title);
metadatums.add(author);
@@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums.add(issn);
metadatums.add(volume);
metadatums.add(issue);
metadatums.add(publisher);
ImportRecord firstRecord = new ImportRecord(metadatums);
@@ -179,6 +182,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums2.add(title2);
metadatums2.add(author2);
@@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums2.add(issn2);
metadatums2.add(volume2);
metadatums2.add(issue2);
metadatums2.add(publisher2);
ImportRecord secondRecord = new ImportRecord(metadatums2);
records.add(firstRecord);
@@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
return records;
}
}
}

View File

@@ -0,0 +1,677 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.dspace.app.rest.matcher.FacetEntryMatcher;
import org.dspace.app.rest.matcher.FacetValueMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.service.CollectionService;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This class tests the correct inheritance of Discovery configurations for sub communities and collections.
* To thoroughly test this, a community and collection structure is set up to where different communities have custom
* configurations configured for them.
*
* The following structure is used:
* - Parent Community 1 - Custom configuration: discovery-parent-community-1
*   -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1
*      -- Collection 111 - Custom configuration: discovery-collection-1-1-1
*      -- Collection 112
*   -- Subcommunity 12
*      -- Collection 121 - Custom configuration: discovery-collection-1-2-1
*      -- Collection 122
* - Parent Community 2
*   -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1
*      -- Collection 211 - Custom configuration: discovery-collection-2-1-1
*      -- Collection 212
*   -- Subcommunity 22
*      -- Collection 221 - Custom configuration: discovery-collection-2-2-1
*      -- Collection 222
*
* Each custom configuration contains a unique index for a unique metadata field, to verify that the correct
* information is indexed and provided for the different search scopes.
*
* Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222,
* and one in collection 122 and 211.
*
* The tests will verify that for each object, the correct facets are provided and that all the necessary fields to
* power these facets are indexed properly.
*
* This file requires the discovery configuration in the following test file:
* src/test/data/dspaceFolder/config/spring/api/test-discovery.xml
*/
public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest {
@Autowired
CollectionService collectionService;
private Community parentCommunity1;
private Community subcommunity11;
private Community subcommunity12;
private Collection collection111;
private Collection collection112;
private Collection collection121;
private Collection collection122;
private Community parentCommunity2;
private Community subcommunity21;
private Community subcommunity22;
private Collection collection211;
private Collection collection212;
private Collection collection221;
private Collection collection222;
@Before
public void setUp() throws Exception {
super.setUp();
context.turnOffAuthorisationSystem();
MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build();
MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build();
parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
.build();
subcommunity11 = CommunityBuilder
.createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1")
.build();
subcommunity12 = CommunityBuilder
.createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2")
.build();
collection111 = CollectionBuilder
.createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1")
.build();
collection112 = CollectionBuilder
.createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2")
.build();
collection121 = CollectionBuilder
.createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1")
.build();
collection122 = CollectionBuilder
.createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2")
.build();
parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
.build();
subcommunity21 = CommunityBuilder
.createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1")
.build();
subcommunity22 = CommunityBuilder
.createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2")
.build();
collection211 = CollectionBuilder
.createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1")
.build();
collection212 = CollectionBuilder
.createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2")
.build();
collection221 = CollectionBuilder
.createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1")
.build();
collection222 = CollectionBuilder
.createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2")
.build();
Item item111 = ItemBuilder.createItem(context, collection111)
.withMetadata("dc", "contributor", "author", "author-item111")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111")
.withMetadata("dc", "test", "collection111field", "collection111field-item111")
.withMetadata("dc", "test", "collection121field", "collection121field-item111")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111")
.withMetadata("dc", "test", "collection211field", "collection211field-item111")
.withMetadata("dc", "test", "collection221field", "collection221field-item111")
.build();
Item item112 = ItemBuilder.createItem(context, collection112)
.withMetadata("dc", "contributor", "author", "author-item112")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112")
.withMetadata("dc", "test", "collection111field", "collection111field-item112")
.withMetadata("dc", "test", "collection121field", "collection121field-item112")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112")
.withMetadata("dc", "test", "collection211field", "collection211field-item112")
.withMetadata("dc", "test", "collection221field", "collection221field-item112")
.build();
Item item121 = ItemBuilder.createItem(context, collection121)
.withMetadata("dc", "contributor", "author", "author-item121")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121")
.withMetadata("dc", "test", "collection111field", "collection111field-item121")
.withMetadata("dc", "test", "collection121field", "collection121field-item121")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121")
.withMetadata("dc", "test", "collection211field", "collection211field-item121")
.withMetadata("dc", "test", "collection221field", "collection221field-item121")
.build();
Item item122 = ItemBuilder.createItem(context, collection122)
.withMetadata("dc", "contributor", "author", "author-item122")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122")
.withMetadata("dc", "test", "collection111field", "collection111field-item122")
.withMetadata("dc", "test", "collection121field", "collection121field-item122")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122")
.withMetadata("dc", "test", "collection211field", "collection211field-item122")
.withMetadata("dc", "test", "collection221field", "collection221field-item122")
.build();
Item item211 = ItemBuilder.createItem(context, collection211)
.withMetadata("dc", "contributor", "author", "author-item211")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211")
.withMetadata("dc", "test", "collection111field", "collection111field-item211")
.withMetadata("dc", "test", "collection121field", "collection121field-item211")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211")
.withMetadata("dc", "test", "collection211field", "collection211field-item211")
.withMetadata("dc", "test", "collection221field", "collection221field-item211")
.build();
Item item212 = ItemBuilder.createItem(context, collection212)
.withMetadata("dc", "contributor", "author", "author-item212")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212")
.withMetadata("dc", "test", "collection111field", "collection111field-item212")
.withMetadata("dc", "test", "collection121field", "collection121field-item212")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212")
.withMetadata("dc", "test", "collection211field", "collection211field-item212")
.withMetadata("dc", "test", "collection221field", "collection221field-item212")
.build();
Item item221 = ItemBuilder.createItem(context, collection221)
.withMetadata("dc", "contributor", "author", "author-item221")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221")
.withMetadata("dc", "test", "collection111field", "collection111field-item221")
.withMetadata("dc", "test", "collection121field", "collection121field-item221")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221")
.withMetadata("dc", "test", "collection211field", "collection211field-item221")
.withMetadata("dc", "test", "collection221field", "collection221field-item221")
.build();
Item item222 = ItemBuilder.createItem(context, collection222)
.withMetadata("dc", "contributor", "author", "author-item222")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222")
.withMetadata("dc", "test", "collection111field", "collection111field-item222")
.withMetadata("dc", "test", "collection121field", "collection121field-item222")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222")
.withMetadata("dc", "test", "collection211field", "collection211field-item222")
.withMetadata("dc", "test", "collection221field", "collection221field-item222")
.build();
Item mappedItem111222 = ItemBuilder
.createItem(context, collection111)
.withMetadata("dc", "contributor", "author", "author-mappedItem111222")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222")
.withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222")
.withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem111222")
.withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222")
.withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222")
.build();
Item mappedItem122211 = ItemBuilder
.createItem(context, collection122)
.withMetadata("dc", "contributor", "author", "author-mappedItem122211")
.withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211")
.withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211")
.withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211")
.withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211")
.withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211")
.withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211")
.withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211")
.build();
collectionService.addItem(context, collection222, mappedItem111222);
collectionService.addItem(context, collection211, mappedItem122211);
context.dispatchEvents();
context.restoreAuthSystemState();
}
@Test
/**
* Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1.
*/
public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(parentCommunity1.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item111", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item112", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item121", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem111222",
1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
@Test
/**
* Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11.
*/
public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity11field")
.param("scope", String.valueOf(subcommunity11.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item111", 1),
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item112", 1),
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-mappedItem111222", 1)
)
));
}
@Test
/**
* Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111.
*/
public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection111field", "text")))
);
getClient().perform(get("/api/discover/facets/collection111field")
.param("scope", String.valueOf(collection111.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection111field",
"collection111field-item111", 1),
FacetValueMatcher.matchEntry("collection111field",
"collection111field-mappedItem111222", 1)
)
));
}
@Test
/**
* Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited
* correctly for Collection 112.
*/
public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity11field")
.param("scope", String.valueOf(collection112.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity11field",
"subcommunity11field-item112", 1)
)
));
}
@Test
/**
* Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited
* correctly for Subcommunity 12.
*/
public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(subcommunity12.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item121", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
@Test
/**
* Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121.
*/
public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection121field", "text")))
);
getClient().perform(get("/api/discover/facets/collection121field")
.param("scope", String.valueOf(collection121.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection121field",
"collection121field-item121", 1)
)
));
}
@Test
/**
* Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited
* correctly for Collection 122.
*/
public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text")))
);
getClient().perform(get("/api/discover/facets/parentcommunity1field")
.param("scope", String.valueOf(collection122.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-item122", 1),
FacetValueMatcher.matchEntry("parentcommunity1field",
"parentcommunity1field-mappedItem122211", 1)
)
));
}
@Test
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Parent Community 2.
*/
public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
@Test
/**
* Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21.
*/
public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity21field")
.param("scope", String.valueOf(subcommunity21.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item211", 1),
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item212", 1),
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-mappedItem122211", 1)
)
));
}
@Test
/**
* Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211.
*/
public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection211field", "text")))
);
getClient().perform(get("/api/discover/facets/collection211field")
.param("scope", String.valueOf(collection211.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection211field",
"collection211field-item211", 1),
FacetValueMatcher.matchEntry("collection211field",
"collection211field-mappedItem122211", 1)
)
));
}
@Test
/**
* Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited
* correctly for Collection 212.
*/
public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text")))
);
getClient().perform(get("/api/discover/facets/subcommunity21field")
.param("scope", String.valueOf(collection212.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("subcommunity21field",
"subcommunity21field-item212", 1)
)
));
}
@Test
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Subcommunity 22.
*/
public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
@Test
/**
* Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221.
*/
public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection221field", "text")))
);
getClient().perform(get("/api/discover/facets/collection221field")
.param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection221field",
"collection221field-item221", 1)
)
));
}
@Test
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Collection 222.
*/
public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
}

View File

@@ -88,7 +88,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT);
@@ -116,6 +116,41 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
}
}
@Test
public void createUnprocessableEntity_prefixContainingInvalidCharacters() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.convert(metadataSchema, Projection.DEFAULT);
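// Each prefix below contains a character (".", "," or a space) that is presumably outside the
// allowed character set for schema prefixes, so every create attempt should be rejected with 422.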
metadataSchemaRest.setPrefix("test.SchemaName");
metadataSchemaRest.setNamespace(TEST_NAMESPACE);
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataSchemaRest.setPrefix("test,SchemaName");
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataSchemaRest.setPrefix("test SchemaName");
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
@Test
public void createUnauthorizedTest()
throws Exception {
@@ -202,7 +237,7 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
@@ -214,7 +249,33 @@ public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegratio
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME, TEST_NAMESPACE_UPDATED)));
}
@Test
public void update_schemaNameShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
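// The schema prefix is presumably immutable once created: the PUT with a changed prefix should
// be rejected, and the original name must still be returned afterwards.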
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME_UPDATED);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadataschemas/" + metadataSchema.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME, TEST_NAMESPACE)));
}
@Test

View File

@@ -9,6 +9,7 @@ package org.dspace.app.rest;
import static com.jayway.jsonpath.JsonPath.read;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@@ -49,12 +50,12 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest {
private static final String ELEMENT = "test element";
private static final String QUALIFIER = "test qualifier";
private static final String ELEMENT = "test_element";
private static final String QUALIFIER = "test_qualifier";
private static final String SCOPE_NOTE = "test scope_note";
private static final String ELEMENT_UPDATED = "test element updated";
private static final String QUALIFIER_UPDATED = "test qualifier updated";
private static final String ELEMENT_UPDATED = "test_element_updated";
private static final String QUALIFIER_UPDATED = "test_qualifier_updated";
private static final String SCOPE_NOTE_UPDATED = "test scope_note updated";
private MetadataSchema metadataSchema;
@@ -564,6 +565,70 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.andExpect(status().isUnprocessableEntity());
}
@Test
public void findByFieldName_sortByFieldNameASC() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
"http://www.dspace.org/ns/aschema").build();
MetadataField metadataField1 = MetadataFieldBuilder
.createMetadataField(context, schema, "2", null, "AScopeNote").build();
MetadataField metadataField2 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", null, "AScopeNote").build();
MetadataField metadataField3 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", "a", "AScopeNote").build();
context.restoreAuthSystemState();
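// Expected ascending order by full field name: "ASchema.1" < "ASchema.1.a" < "ASchema.2",
// i.e. metadataField2, then metadataField3, then metadataField1.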
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("query", schema.getName())
.param("sort", "fieldName,ASC"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.metadatafields", contains(
MetadataFieldMatcher.matchMetadataField(metadataField2),
MetadataFieldMatcher.matchMetadataField(metadataField3),
MetadataFieldMatcher.matchMetadataField(metadataField1)
)))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(3)));
}
@Test
public void findByFieldName_sortByFieldNameDESC() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema schema = MetadataSchemaBuilder.createMetadataSchema(context, "ASchema",
"http://www.dspace.org/ns/aschema").build();
MetadataField metadataField1 = MetadataFieldBuilder
.createMetadataField(context, schema, "2", null, "AScopeNote").build();
MetadataField metadataField2 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", null, "AScopeNote").build();
MetadataField metadataField3 = MetadataFieldBuilder
.createMetadataField(context, schema, "1", "a", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("query", schema.getName())
.param("sort", "fieldName,DESC"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.metadatafields", contains(
MetadataFieldMatcher.matchMetadataField(metadataField1),
MetadataFieldMatcher.matchMetadataField(metadataField3),
MetadataFieldMatcher.matchMetadataField(metadataField2)
)))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(3)));
}
@Test
public void createSuccess() throws Exception {
@@ -575,7 +640,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
@@ -606,7 +672,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
Integer id = null;
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
null), nullValue());
id = read(
getClient(authToken)
@@ -641,7 +708,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
try {
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
@@ -689,6 +757,94 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.andExpect(status().isUnauthorized());
}
@Test
public void createUnprocessableEntity_elementContainingInvalidCharacters() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement("testElement.ForCreate");
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String authToken = getAuthToken(admin.getEmail(), password);
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setElement("testElement,ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setElement("testElement ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
@Test
public void createUnprocessableEntity_qualifierContainingInvalidCharacters() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier("testQualifier.ForCreate");
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String authToken = getAuthToken(admin.getEmail(), password);
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setQualifier("testQualifier,ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
metadataFieldRest.setQualifier("testQualifier ForCreate");
assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(),
metadataFieldRest.getQualifier()), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", String.valueOf(metadataSchema.getID()))
.param("projection", "full")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
}
@Test
public void createUnauthorizedEPersonNoAdminRights() throws Exception {
@@ -832,31 +988,81 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isOk());
}
@Test
public void update_elementShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
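// The element (and, in the test below, the qualifier) is part of the field's identity, so
// changing it via PUT is expected to fail with 422 and leave the stored field untouched.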
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT_UPDATED);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
@Test
public void update_qualifierShouldThrowError() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
@Test
public void update_checkNotUpdatedInIndex() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
@@ -885,27 +1091,27 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnprocessableEntity());
// new metadata field not found in index
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("schema", metadataSchema.getName())
.param("element", ELEMENT_UPDATED)
.param("qualifier", QUALIFIER_UPDATED))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
ELEMENT_UPDATED, QUALIFIER_UPDATED))
))
.andExpect(jsonPath("$.page.totalElements", is(1)));
.andExpect(jsonPath("$.page.totalElements", is(0)));
// original metadata field found in index
getClient().perform(get(SEARCH_BYFIELDNAME_ENDPOINT)
.param("schema", metadataSchema.getName())
.param("element", metadataField.getElement())
.param("qualifier", metadataField.getQualifier()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.page.totalElements", is(0)));
.andExpect(jsonPath("$._embedded.metadatafields", Matchers.hasItem(
MetadataFieldMatcher.matchMetadataFieldByKeys(metadataSchema.getName(),
ELEMENT, QUALIFIER))
))
.andExpect(jsonPath("$.page.totalElements", is(1)));
}
@Test

View File

@@ -0,0 +1,213 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Integration tests for {@link PubmedImportMetadataSourceServiceImpl}
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest {
@Autowired
private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl;
@Autowired
private LiveImportClientImpl liveImportClientImpl;
@Test
public void pubmedImportMetadataGetRecordsTest() throws Exception {
context.turnOffAuthorisationSystem();
CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml");
InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) {
liveImportClientImpl.setHttpClient(httpClient);
CloseableHttpResponse fetchResponse = mockResponse(
IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
CloseableHttpResponse searchResponse = mockResponse(
IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
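// Mockito hands back the stubbed responses in call order: the first execute() returns the
// fetch fixture, the second the search fixture. mockResponse is presumably a helper on
// AbstractLiveImportIntegrationTest that wraps a body and status into a CloseableHttpResponse.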
context.restoreAuthSystemState();
ArrayList<ImportRecord> collection2match = getRecords();
Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
assertEquals(1, recordsImported.size());
matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
} finally {
liveImportClientImpl.setHttpClient(originalHttpClient);
}
}
@Test
public void pubmedImportMetadataGetRecords2Test() throws Exception {
context.turnOffAuthorisationSystem();
CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml");
InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) {
liveImportClientImpl.setHttpClient(httpClient);
CloseableHttpResponse fetchResponse = mockResponse(
IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
CloseableHttpResponse searchResponse = mockResponse(
IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
context.restoreAuthSystemState();
ArrayList<ImportRecord> collection2match = getRecords2();
Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
assertEquals(1, recordsImported.size());
matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
} finally {
liveImportClientImpl.setHttpClient(originalHttpClient);
}
}
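// The builders below assemble the metadata that the PubMed XML fixtures are expected to map to;
// matchRecords (presumably defined on AbstractLiveImportIntegrationTest) compares the two lists.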
private ArrayList<ImportRecord> getRecords() {
ArrayList<ImportRecord> records = new ArrayList<>();
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
//define first record
MetadatumDTO title = createMetadatumDTO("dc","title", null,
"Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review.");
MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize"
+ " the main strategies for teaching clinical reasoning described in the literature in the context of"
+ " advanced clinical practice and promote new areas of research to improve the pedagogical approach"
+ " to clinical reasoning in Advanced Practice Nursing.");
MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and"
+ " clinical thinking are essential elements in the advanced nursing clinical practice decision-making"
+ " process. The quality improvement of care is related to the development of those skills."
+ " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical"
+ " reasoning in advanced clinical practice.");
MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was"
+ " conducted using the framework developed by Arksey and O'Malley as a research strategy."
+ " Consistent with the nature of scoping reviews, a study protocol has been established.");
MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and"
+ " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary"
+ " revision studies, published in biomedical databases, were selected, including qualitative ones."
+ " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID."
+ " Three authors independently evaluated the articles for titles, abstracts, and full text.");
MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined,"
+ " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility,"
+ " and 27 were included in the scoping review. The results that emerged from the review were"
+ " interpreted and grouped into three macro strategies (simulations-based education, art and visual"
+ " thinking, and other learning approaches) and nineteen educational interventions.");
MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different"
+ " strategies, the simulations are the most used. Despite this, our scoping review reveals that is"
+ " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic"
+ " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to"
+ " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to"
+ " acquire an adequate level of judgment and critical thinking. Therefore, it will be"
+ " necessary to relate teaching methodologies with the skills developed.");
MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638");
MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia");
MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana");
MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola");
MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina");
MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia");
MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica");
MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02");
MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing");
MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning");
MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking");
MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies");
MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education");
MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology");
metadatums.add(title);
metadatums.add(description1);
metadatums.add(description2);
metadatums.add(description3);
metadatums.add(description4);
metadatums.add(description5);
metadatums.add(description6);
metadatums.add(identifierOther);
metadatums.add(author1);
metadatums.add(author2);
metadatums.add(author3);
metadatums.add(author4);
metadatums.add(author5);
metadatums.add(author6);
metadatums.add(date);
metadatums.add(language);
metadatums.add(subject1);
metadatums.add(subject2);
metadatums.add(subject3);
metadatums.add(subject4);
metadatums.add(subject5);
metadatums.add(subject6);
ImportRecord record = new ImportRecord(metadatums);
records.add(record);
return records;
}
private ArrayList<ImportRecord> getRecords2() {
ArrayList<ImportRecord> records = new ArrayList<>();
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
//define first record
MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez.");
MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely"
+ " used interfaces for the retrieval of information from biological databases is the NCBI Entrez"
+ " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between"
+ " the individual entries found in numerous public databases. The existence of such natural"
+ " connections, mostly biological in nature, argued for the development of a method through which"
+ " all the information about a particular biological entity could be found without having to"
+ " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based"
+ " searches, illustrating the types of information that can be retrieved through the Entrez system."
+ " An alternate protocol builds upon the first basic protocol, using additional,"
+ " built-in features of the Entrez system, and providing alternative ways to issue the initial query."
+ " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure"
+ " visualization tool, is also discussed.");
MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942");
MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen");
MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D");
MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10");
MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
metadatums.add(title);
metadatums.add(description);
metadatums.add(identifierOther);
metadatums.add(author1);
metadatums.add(author2);
metadatums.add(date);
metadatums.add(language);
ImportRecord record = new ImportRecord(metadatums);
records.add(record);
return records;
}
}

View File

@@ -257,10 +257,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
Matchers.containsString("page=1"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(6)))
.andExpect(jsonPath("$.page.totalPages", is(6)))
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(0)));
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions")
@@ -268,7 +268,7 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
.param("page", "1"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable")))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("test-hidden")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
@@ -285,8 +285,8 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page="), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(6)))
.andExpect(jsonPath("$.page.totalPages", is(6)))
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(1)));
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions")
@@ -294,30 +294,56 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
.param("page", "2"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess")))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("accessConditionNotDiscoverable")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.prev.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=1"), Matchers.containsString("size=1"))))
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=1"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.next.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.self.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=2"), Matchers.containsString("size=1"))))
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=2"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(6)))
.andExpect(jsonPath("$.page.totalPages", is(6)))
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(2)));
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions")
.param("size", "1")
.param("page", "3"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("languagetestprocess")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.prev.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=2"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.next.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=4"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.self.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(3)));
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions")
.param("size", "1")
.param("page", "3"))
.param("page", "4"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("qualdroptest")))
@@ -326,24 +352,24 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.prev.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=2"), Matchers.containsString("size=1"))))
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.next.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=4"), Matchers.containsString("size=1"))))
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.self.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
Matchers.containsString("page=4"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(6)))
.andExpect(jsonPath("$.page.totalPages", is(6)))
.andExpect(jsonPath("$.page.number", is(3)));
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(4)));
getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions")
.param("size", "1")
.param("page", "4"))
.param("page", "5"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissiondefinitions[0].id", is("extractiontestprocess")))
@@ -352,20 +378,20 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra
Matchers.containsString("page=0"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.prev.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=3"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.next.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
Matchers.containsString("page=4"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.next.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.self.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=4"), Matchers.containsString("size=1"))))
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissiondefinitions?"),
Matchers.containsString("page=5"), Matchers.containsString("size=1"))))
Matchers.containsString("page=6"), Matchers.containsString("size=1"))))
.andExpect(jsonPath("$.page.size", is(1)))
.andExpect(jsonPath("$.page.totalElements", is(6)))
.andExpect(jsonPath("$.page.totalPages", is(6)))
.andExpect(jsonPath("$.page.number", is(4)));
.andExpect(jsonPath("$.page.totalElements", is(7)))
.andExpect(jsonPath("$.page.totalPages", is(7)))
.andExpect(jsonPath("$.page.number", is(5)));
}
}

View File

@@ -67,13 +67,13 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(content().contentType(contentType))
//The configuration file for the test env includes 10 forms
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(1)))
.andExpect(jsonPath("$.page.number", is(0)))
.andExpect(
jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms")))
//The array of submissionforms should have a size of 10
.andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10))))
;
}
@@ -84,12 +84,12 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", equalTo(8)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(1)))
.andExpect(jsonPath("$.page.number", is(0)))
.andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL
+ "config/submissionforms")))
.andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8))));
.andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(10))));
}
@Test
@@ -696,10 +696,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
Matchers.containsString("page=1"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", equalTo(8)))
.andExpect(jsonPath("$.page.totalPages", equalTo(4)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(5)))
.andExpect(jsonPath("$.page.number", is(0)));
getClient(tokenAdmin).perform(get("/api/config/submissionforms")
@@ -707,8 +707,8 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.param("page", "1"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("languagetest")))
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest")))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-workflow-hidden")))
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("languagetest")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=0"), Matchers.containsString("size=2"))))
@@ -723,10 +723,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
Matchers.containsString("page=2"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", equalTo(8)))
.andExpect(jsonPath("$.page.totalPages", equalTo(4)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(5)))
.andExpect(jsonPath("$.page.number", is(1)));
getClient(tokenAdmin).perform(get("/api/config/submissionforms")
@@ -734,8 +734,8 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.param("page", "2"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo")))
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority")))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("test-outside-submission-hidden")))
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("qualdroptest")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=0"), Matchers.containsString("size=2"))))
@@ -747,10 +747,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
Matchers.containsString("page=2"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", equalTo(8)))
.andExpect(jsonPath("$.page.totalPages", equalTo(4)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(5)))
.andExpect(jsonPath("$.page.number", is(2)));
getClient(tokenAdmin).perform(get("/api/config/submissionforms")
@@ -758,7 +758,8 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.param("page", "3"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone")))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpagetwo")))
.andExpect(jsonPath("$._embedded.submissionforms[1].id", is("sampleauthority")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=0"), Matchers.containsString("size=2"))))
@@ -770,10 +771,33 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", equalTo(8)))
.andExpect(jsonPath("$.page.totalPages", equalTo(4)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(5)))
.andExpect(jsonPath("$.page.number", is(3)));
getClient(tokenAdmin).perform(get("/api/config/submissionforms")
.param("size", "2")
.param("page", "4"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.submissionforms[0].id", is("traditionalpageone")))
.andExpect(jsonPath("$._links.first.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=0"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.prev.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=3"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.self.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$._links.last.href", Matchers.allOf(
Matchers.containsString("/api/config/submissionforms?"),
Matchers.containsString("page=4"), Matchers.containsString("size=2"))))
.andExpect(jsonPath("$.page.size", is(2)))
.andExpect(jsonPath("$.page.totalElements", equalTo(10)))
.andExpect(jsonPath("$.page.totalPages", equalTo(5)))
.andExpect(jsonPath("$.page.number", is(4)));
}
}

View File

@@ -2122,4 +2122,35 @@ public class WorkflowItemRestRepositoryIT extends AbstractControllerIntegrationT
WorkflowItemBuilder.deleteWorkflowItem(idRef.get());
}
}
@Test
public void testWorkflowWithHiddenSections() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden")
.withName("Collection 1")
.withWorkflowGroup(1, eperson)
.build();
XmlWorkflowItem workflowItem = WorkflowItemBuilder.createWorkflowItem(context, collection)
.withTitle("Workflow Item")
.build();
context.restoreAuthSystemState();
getClient(getAuthToken(admin.getEmail(), password))
.perform(get("/api/workflow/workflowitems/" + workflowItem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.sections.test-outside-workflow-hidden").exists())
.andExpect(jsonPath("$.sections.test-outside-submission-hidden").doesNotExist())
.andExpect(jsonPath("$.sections.test-never-hidden").exists())
.andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist());
}
}

View File

@@ -8566,4 +8566,41 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
)));
}
@Test
public void testSubmissionWithHiddenSections() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection collection = CollectionBuilder.createCollection(context, parentCommunity, "123456789/test-hidden")
.withName("Collection 1")
.build();
WorkspaceItem workspaceItem = WorkspaceItemBuilder.createWorkspaceItem(context, collection)
.withTitle("Workspace Item")
.withIssueDate("2023-01-01")
.withType("book")
.build();
context.restoreAuthSystemState();
String adminToken = getAuthToken(admin.getEmail(), password);
getClient(adminToken)
.perform(get("/api/submission/workspaceitems/" + workspaceItem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.sections.test-outside-workflow-hidden").doesNotExist())
.andExpect(jsonPath("$.sections.test-outside-submission-hidden").exists())
.andExpect(jsonPath("$.sections.test-never-hidden").exists())
.andExpect(jsonPath("$.sections.test-always-hidden").doesNotExist());
// Deposit the item
getClient(adminToken).perform(post("/api/workflow/workflowitems")
.content("/api/submission/workspaceitems/" + workspaceItem.getID())
.contentType(textUriContentType))
.andExpect(status().isCreated());
}
}

View File

@@ -8,6 +8,9 @@
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
@@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
/**
* Utility class to construct a Matcher for a browse index
@@ -31,7 +33,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> subjectBrowseIndex(final String order) {
return allOf(
hasJsonPath("$.metadata", contains("dc.subject.*")),
hasJsonPath("$.metadataBrowse", Matchers.is(true)),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("text")),
hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -44,7 +47,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> titleBrowseIndex(final String order) {
return allOf(
hasJsonPath("$.metadata", contains("dc.title")),
hasJsonPath("$.metadataBrowse", Matchers.is(false)),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("title")),
hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -56,7 +60,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> contributorBrowseIndex(final String order) {
return allOf(
hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")),
hasJsonPath("$.metadataBrowse", Matchers.is(true)),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("text")),
hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -69,7 +74,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> dateIssuedBrowseIndex(final String order) {
return allOf(
hasJsonPath("$.metadata", contains("dc.date.issued")),
hasJsonPath("$.metadataBrowse", Matchers.is(false)),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("date")),
hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -77,4 +83,22 @@ public class BrowseIndexMatcher {
hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items"))
);
}
public static Matcher<? super Object> hierarchicalBrowseIndex(final String vocabulary) {
return allOf(
hasJsonPath("$.metadata", contains("dc.subject")),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.facetType", equalToIgnoringCase("subject")),
hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)),
hasJsonPath("$._links.vocabulary.href",
is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))),
hasJsonPath("$._links.items.href",
is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))),
hasJsonPath("$._links.entries.href",
is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))),
hasJsonPath("$._links.self.href",
is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary)))
);
}
}

View File

@@ -110,6 +110,17 @@ public class FacetEntryMatcher {
);
}
/**
 * Check that a facet with the given name and facet type exists, and that a next-page link is present when expected.
 */
public static Matcher<? super Object> matchFacet(boolean hasNext, String name, String facetType) {
return allOf(
hasJsonPath("$.name", is(name)),
hasJsonPath("$.facetType", is(facetType)),
hasJsonPath("$.facetLimit", any(Integer.class)),
hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)),
hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name))
);
}
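// A minimal usage sketch (hypothetical test snippet, not part of this change):
//   getClient().perform(get("/api/discover/facets"))
//              .andExpect(jsonPath("$._embedded.facets", Matchers.hasItem(
//                  matchFacet(false, "subject", "text"))));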
/**
* Check that a facet over the dc.type exists and match the default configuration
*

View File

@@ -60,6 +60,16 @@ public class FacetValueMatcher {
);
}
/**
 * Check that a facet entry with the given label and count exists for the given facet.
 */
public static Matcher<? super Object> matchEntry(String facet, String label, int count) {
return allOf(
hasJsonPath("$.label", is(label)),
hasJsonPath("$.type", is("discover")),
hasJsonPath("$.count", is(count)),
hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")),
hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals"))
);
}
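// A minimal usage sketch (hypothetical test snippet, not part of this change):
//   .andExpect(jsonPath("$._embedded.values", Matchers.hasItem(
//       matchEntry("subject", "History", 2))));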
public static Matcher<? super Object> entrySubjectWithAuthority(String label, String authority, int count) {
return allOf(

View File

@@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest {
sortConfiguration.setSortFields(listSortField);
sortConfiguration.setDefaultSortField(defaultSort);
discoveryConfiguration.setSearchSortConfiguration(sortConfiguration);
DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet();
@@ -167,6 +169,16 @@ public class RestDiscoverQueryBuilderTest {
page.getOffset(), "SCORE", "ASC");
}
@Test
public void testSortByDefaultSortField() throws Exception {
page = PageRequest.of(2, 10);
restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page);
verify(discoverQueryBuilder, times(1))
.buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(),
page.getPageSize(), page.getOffset(), null, null);
}
@Test(expected = DSpaceBadRequestException.class)
public void testCatchIllegalArgumentException() throws Exception {
when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(),

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE eSearchResult PUBLIC "-//NLM//DTD esearch 20060628//EN" "https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd">
<eSearchResult>
<Count>1</Count>
<RetMax>1</RetMax>
<RetStart>0</RetStart>
<QueryKey>1</QueryKey>
<WebEnv>MCID_64784b5ab65e3b2b2253cd3a</WebEnv>
<IdList>
<Id>36708638</Id>
</IdList>
<TranslationSet/>
<QueryTranslation>"10 1016 j nepr 2023 103548"[All Fields]</QueryTranslation>
</eSearchResult>

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE eSearchResult PUBLIC "-//NLM//DTD esearch 20060628//EN" "https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd">
<eSearchResult>
<Count>1</Count>
<RetMax>1</RetMax>
<RetStart>0</RetStart>
<QueryKey>1</QueryKey>
<WebEnv>MCID_64784b12ccf058150336d6a8</WebEnv>
<IdList>
<Id>21975942</Id>
</IdList>
<TranslationSet/>
<QueryTranslation>"10 1002 0471142905 hg0610s71"[All Fields]</QueryTranslation>
</eSearchResult>

View File

@@ -0,0 +1,194 @@
<?xml version="1.0" ?>
<!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2023//EN" "https://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_230101.dtd">
<PubmedArticleSet>
<PubmedArticle>
<MedlineCitation Status="MEDLINE" Owner="NLM" IndexingMethod="Automated">
<PMID Version="1">36708638</PMID>
<DateCompleted>
<Year>2023</Year>
<Month>02</Month>
<Day>23</Day>
</DateCompleted>
<DateRevised>
<Year>2023</Year>
<Month>02</Month>
<Day>23</Day>
</DateRevised>
<Article PubModel="Print-Electronic">
<Journal>
<ISSN IssnType="Electronic">1873-5223</ISSN>
<JournalIssue CitedMedium="Internet">
<Volume>67</Volume>
<PubDate>
<Year>2023</Year>
<Month>Feb</Month>
</PubDate>
</JournalIssue>
<Title>Nurse education in practice</Title>
<ISOAbbreviation>Nurse Educ Pract</ISOAbbreviation>
</Journal>
<ArticleTitle>Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review.</ArticleTitle>
<Pagination>
<StartPage>103548</StartPage>
<MedlinePgn>103548</MedlinePgn>
</Pagination>
<ELocationID EIdType="doi" ValidYN="Y">10.1016/j.nepr.2023.103548</ELocationID>
<ELocationID EIdType="pii" ValidYN="Y">S1471-5953(23)00010-0</ELocationID>
<Abstract>
<AbstractText Label="AIM/OBJECTIVE" NlmCategory="OBJECTIVE">To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing.</AbstractText>
<AbstractText Label="BACKGROUND" NlmCategory="BACKGROUND">Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice.</AbstractText>
<AbstractText Label="DESIGN" NlmCategory="METHODS">A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established.</AbstractText>
<AbstractText Label="METHODS" NlmCategory="METHODS">The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text.</AbstractText>
<AbstractText Label="RESULTS" NlmCategory="RESULTS">1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions.</AbstractText>
<AbstractText Label="CONCLUSIONS" NlmCategory="CONCLUSIONS">Among the different strategies, the simulations are the most used. Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed.</AbstractText>
<CopyrightInformation>Copyright &#xa9; 2023 Elsevier Ltd. All rights reserved.</CopyrightInformation>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
<LastName>Giuffrida</LastName>
<ForeName>Silvia</ForeName>
<Initials>S</Initials>
<AffiliationInfo>
<Affiliation>Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Silano</LastName>
<ForeName>Verdiana</ForeName>
<Initials>V</Initials>
<AffiliationInfo>
<Affiliation>Nursing Direction of Settore Anziani Citt&#xe0; di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Ramacciati</LastName>
<ForeName>Nicola</ForeName>
<Initials>N</Initials>
<AffiliationInfo>
<Affiliation>Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Prandi</LastName>
<ForeName>Cesarina</ForeName>
<Initials>C</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Baldon</LastName>
<ForeName>Alessia</ForeName>
<Initials>A</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Bianchi</LastName>
<ForeName>Monica</ForeName>
<Initials>M</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
</AuthorList>
<Language>eng</Language>
<PublicationTypeList>
<PublicationType UI="D016428">Journal Article</PublicationType>
<PublicationType UI="D016454">Review</PublicationType>
</PublicationTypeList>
<ArticleDate DateType="Electronic">
<Year>2023</Year>
<Month>01</Month>
<Day>17</Day>
</ArticleDate>
</Article>
<MedlineJournalInfo>
<Country>Scotland</Country>
<MedlineTA>Nurse Educ Pract</MedlineTA>
<NlmUniqueID>101090848</NlmUniqueID>
<ISSNLinking>1471-5953</ISSNLinking>
</MedlineJournalInfo>
<CitationSubset>IM</CitationSubset>
<MeshHeadingList>
<MeshHeading>
<DescriptorName UI="D006801" MajorTopicYN="N">Humans</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D057179" MajorTopicYN="Y">Advanced Practice Nursing</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D007858" MajorTopicYN="N">Learning</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D003479" MajorTopicYN="N">Curriculum</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013850" MajorTopicYN="N">Thinking</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D000086723" MajorTopicYN="N">Clinical Reasoning</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013338" MajorTopicYN="Y">Students, Nursing</DescriptorName>
</MeshHeading>
</MeshHeadingList>
<KeywordList Owner="NOTNLM">
<Keyword MajorTopicYN="N">Advanced practice nursing</Keyword>
<Keyword MajorTopicYN="N">Clinical reasoning</Keyword>
<Keyword MajorTopicYN="N">Critical thinking</Keyword>
<Keyword MajorTopicYN="N">Educational strategies</Keyword>
<Keyword MajorTopicYN="N">Nursing education</Keyword>
<Keyword MajorTopicYN="N">Teaching methodology</Keyword>
</KeywordList>
<CoiStatement>Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper.</CoiStatement>
</MedlineCitation>
<PubmedData>
<History>
<PubMedPubDate PubStatus="received">
<Year>2022</Year>
<Month>11</Month>
<Day>9</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="revised">
<Year>2022</Year>
<Month>12</Month>
<Day>17</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="accepted">
<Year>2023</Year>
<Month>1</Month>
<Day>10</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="pubmed">
<Year>2023</Year>
<Month>1</Month>
<Day>29</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="medline">
<Year>2023</Year>
<Month>2</Month>
<Day>25</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="entrez">
<Year>2023</Year>
<Month>1</Month>
<Day>28</Day>
<Hour>18</Hour>
<Minute>7</Minute>
</PubMedPubDate>
</History>
<PublicationStatus>ppublish</PublicationStatus>
<ArticleIdList>
<ArticleId IdType="pubmed">36708638</ArticleId>
<ArticleId IdType="doi">10.1016/j.nepr.2023.103548</ArticleId>
<ArticleId IdType="pii">S1471-5953(23)00010-0</ArticleId>
</ArticleIdList>
</PubmedData>
</PubmedArticle>
</PubmedArticleSet>

View File

@@ -0,0 +1,132 @@
<?xml version="1.0" ?>
<!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2023//EN" "https://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_230101.dtd">
<PubmedArticleSet>
<PubmedArticle>
<MedlineCitation Status="MEDLINE" Owner="NLM">
<PMID Version="1">21975942</PMID>
<DateCompleted>
<Year>2012</Year>
<Month>01</Month>
<Day>13</Day>
</DateCompleted>
<DateRevised>
<Year>2016</Year>
<Month>10</Month>
<Day>21</Day>
</DateRevised>
<Article PubModel="Print">
<Journal>
<ISSN IssnType="Electronic">1934-8258</ISSN>
<JournalIssue CitedMedium="Internet">
<Volume>Chapter 6</Volume>
<PubDate>
<Year>2011</Year>
<Month>Oct</Month>
</PubDate>
</JournalIssue>
<Title>Current protocols in human genetics</Title>
<ISOAbbreviation>Curr Protoc Hum Genet</ISOAbbreviation>
</Journal>
<ArticleTitle>Searching NCBI Databases Using Entrez.</ArticleTitle>
<Pagination>
<StartPage>Unit6.10</StartPage>
<MedlinePgn>Unit6.10</MedlinePgn>
</Pagination>
<ELocationID EIdType="doi" ValidYN="Y">10.1002/0471142905.hg0610s71</ELocationID>
<Abstract>
<AbstractText>One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed.</AbstractText>
<CopyrightInformation>&#xa9; 2011 by John Wiley &amp; Sons, Inc.</CopyrightInformation>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
<LastName>Gibney</LastName>
<ForeName>Gretchen</ForeName>
<Initials>G</Initials>
</Author>
<Author ValidYN="Y">
<LastName>Baxevanis</LastName>
<ForeName>Andreas D</ForeName>
<Initials>AD</Initials>
</Author>
</AuthorList>
<Language>eng</Language>
<PublicationTypeList>
<PublicationType UI="D016428">Journal Article</PublicationType>
</PublicationTypeList>
</Article>
<MedlineJournalInfo>
<Country>United States</Country>
<MedlineTA>Curr Protoc Hum Genet</MedlineTA>
<NlmUniqueID>101287858</NlmUniqueID>
<ISSNLinking>1934-8258</ISSNLinking>
</MedlineJournalInfo>
<CitationSubset>IM</CitationSubset>
<MeshHeadingList>
<MeshHeading>
<DescriptorName UI="D000818" MajorTopicYN="N">Animals</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D003628" MajorTopicYN="N">Database Management Systems</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D016208" MajorTopicYN="Y">Databases, Factual</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D006801" MajorTopicYN="N">Humans</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D016247" MajorTopicYN="N">Information Storage and Retrieval</DescriptorName>
<QualifierName UI="Q000379" MajorTopicYN="Y">methods</QualifierName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D020407" MajorTopicYN="Y">Internet</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D008968" MajorTopicYN="N">Molecular Conformation</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D009317" MajorTopicYN="N">National Library of Medicine (U.S.)</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D039781" MajorTopicYN="N">PubMed</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D014481" MajorTopicYN="N" Type="Geographic">United States</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D014584" MajorTopicYN="Y">User-Computer Interface</DescriptorName>
</MeshHeading>
</MeshHeadingList>
</MedlineCitation>
<PubmedData>
<History>
<PubMedPubDate PubStatus="entrez">
<Year>2011</Year>
<Month>10</Month>
<Day>7</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="pubmed">
<Year>2011</Year>
<Month>10</Month>
<Day>7</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="medline">
<Year>2012</Year>
<Month>1</Month>
<Day>14</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
</History>
<PublicationStatus>ppublish</PublicationStatus>
<ArticleIdList>
<ArticleId IdType="pubmed">21975942</ArticleId>
<ArticleId IdType="doi">10.1002/0471142905.hg0610s71</ArticleId>
</ArticleIdList>
</PubmedData>
</PubmedArticle>
</PubmedArticleSet>

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.servicemanager;
import static org.apache.logging.log4j.Level.DEBUG;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
@@ -21,6 +23,8 @@ import java.util.Map;
import javax.annotation.PreDestroy;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.kernel.Activator;
import org.dspace.kernel.config.SpringLoader;
import org.dspace.kernel.mixins.ConfigChangeListener;
@@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceChangeListener;
import org.dspace.kernel.mixins.ServiceManagerReadyAware;
import org.dspace.servicemanager.config.DSpaceConfigurationService;
import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.dspace.utils.CallStackUtils;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
@@ -44,7 +47,7 @@ import org.springframework.context.support.ClassPathXmlApplicationContext;
*/
public final class DSpaceServiceManager implements ServiceManagerSystem {
private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class);
private static Logger log = LogManager.getLogger();
public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml";
public static final String CORE_RESOURCE_PATH = "classpath*:spring/spring-dspace-core-services.xml";
@@ -426,9 +429,10 @@ public final class DSpaceServiceManager implements ServiceManagerSystem {
service = (T) applicationContext.getBean(name, type);
} catch (BeansException e) {
// no luck, try the fall back option
log.warn(
log.debug(
"Unable to locate bean by name or id={}."
+ " Will try to look up bean by type next.", name, e);
+ " Will try to look up bean by type next.", name);
CallStackUtils.logCaller(log, DEBUG);
service = null;
}
} else {
@@ -437,8 +441,9 @@ public final class DSpaceServiceManager implements ServiceManagerSystem {
service = (T) applicationContext.getBean(type.getName(), type);
} catch (BeansException e) {
// no luck, try the fall back option
log.warn("Unable to locate bean by name or id={}."
+ " Will try to look up bean by type next.", type.getName(), e);
log.debug("Unable to locate bean by name or id={}."
+ " Will try to look up bean by type next.", type::getName);
CallStackUtils.logCaller(log, DEBUG);
service = null;
}
}

View File

@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.utils;
import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE;
import java.lang.StackWalker.StackFrame;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
/**
* Utility methods for manipulating call stacks.
*
* @author mwood
*/
public class CallStackUtils {
private CallStackUtils() {}
/**
* Log the class, method and line of the caller's caller.
*
* @param log logger to use.
* @param level log at this level, if enabled.
*/
public static void logCaller(Logger log, Level level) {
if (log.isEnabled(level)) {
StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE);
StackFrame caller = stack.walk(stream -> stream.skip(2)
.findFirst()
.get());
String callerClassName = caller.getDeclaringClass().getCanonicalName();
String callerMethodName = caller.getMethodName();
int callerLine = caller.getLineNumber();
log.log(level, "Called from {}.{} line {}.",
callerClassName, callerMethodName, callerLine);
}
}
}
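// A minimal usage sketch (hypothetical caller, mirroring the DSpaceServiceManager change above):
//   private static final Logger log = LogManager.getLogger();
//   ...
//   log.debug("Unable to locate bean by name or id={}. Will try to look up bean by type next.", name);
//   CallStackUtils.logCaller(log, Level.DEBUG);
// With DEBUG enabled, this would log e.g. "Called from org.example.Foo.bar line 42."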

View File

@@ -75,6 +75,9 @@
<xsl:apply-templates
select="doc:metadata/doc:element[@name='dc']/doc:element[@name='identifier']/doc:element[@name='uri']"
mode="datacite"/>
<!-- ACCESS RIGHTS from access status mechanism -->
<xsl:apply-templates
select="doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']" mode="datacite" />
<!-- datacite:rights -->
<xsl:apply-templates
select="doc:metadata/doc:element[@name='dc']/doc:element[@name='rights']" mode="datacite"/>
@@ -658,6 +661,40 @@
</xsl:if>
</xsl:template>
<!-- from Access Status mechanism -->
<!-- datacite:rights -->
<!-- https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_accessrights.html -->
<xsl:template match="doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']" mode="datacite">
<xsl:variable name="rightsValue">
<xsl:call-template name="resolveRightsName">
<xsl:with-param name="field" select="text()"/>
</xsl:call-template>
</xsl:variable>
<xsl:variable name="rightsURI">
<xsl:call-template name="resolveRightsURI">
<xsl:with-param name="field" select="text()"/>
</xsl:call-template>
</xsl:variable>
<xsl:variable name="lc_rightsValue">
<xsl:call-template name="lowercase">
<xsl:with-param name="value" select="$rightsValue"/>
</xsl:call-template>
</xsl:variable>
<!-- We check that only values ending in "access" are used as datacite:rights.
This is safe because dc.rights values are pre-normalized in openaire4.xsl to end in the term
"access", according to the COAR Controlled Vocabulary. -->
<xsl:if test="ends-with($lc_rightsValue,'access')">
<datacite:rights>
<xsl:if test="$rightsURI">
<xsl:attribute name="rightsURI">
<xsl:value-of select="$rightsURI"/>
</xsl:attribute>
</xsl:if>
<xsl:value-of select="$rightsValue"/>
</datacite:rights>
</xsl:if>
</xsl:template>
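<!-- A worked example (hypothetical input): an access-status value of 'open.access'
resolves to the name "open access" and the URI http://purl.org/coar/access_right/c_abf2,
so this template produces:
<datacite:rights rightsURI="http://purl.org/coar/access_right/c_abf2">open access</datacite:rights> -->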
<!-- datacite:subjects -->
<!-- https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_subject.html -->
@@ -1125,11 +1162,11 @@
<!-- Auxiliary templates - get global values -->
<!-- -->
<!-- get the coar access rights globally -->
<!-- get the coar access rights globally from access status mechanism -->
<xsl:template name="getRightsURI">
<xsl:call-template name="resolveRightsURI">
<xsl:with-param name="field"
select="//doc:element[@name='dc']/doc:element[@name='rights']/doc:element/doc:field[@name='value'and ends-with(translate(text(), $uppercase, $smallcase),'access')]/text()"/>
select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/>
</xsl:call-template>
</xsl:template>
@@ -1207,7 +1244,7 @@
</xsl:element>
</xsl:if>
</xsl:template>
<!--
This template will recursively create the field name based on parent node names
to be something like this:
@@ -1602,6 +1639,37 @@
</xsl:choose>
</xsl:template>
<!--
This template will return the COAR Access Right Vocabulary name in English,
like "open access",
based on the values from the DSpace Access Status mechanism, like the String 'open.access'.
Please check the class org.dspace.access.status.DefaultAccessStatusHelper for more information.
https://openaire-guidelines-for-literature-repository-managers.readthedocs.io/en/v4.0.0/field_accessrights.html#definition-and-usage-instruction
-->
<xsl:template name="resolveRightsName">
<xsl:param name="field"/>
<xsl:variable name="lc_value">
<xsl:call-template name="lowercase">
<xsl:with-param name="value" select="$field"/>
</xsl:call-template>
</xsl:variable>
<xsl:choose>
<xsl:when test="$lc_value = 'open.access'">
<xsl:text>open access</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'embargo'">
<xsl:text>embargoed access</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'restricted'">
<xsl:text>restricted access</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'metadata.only'">
<xsl:text>metadata only access</xsl:text>
</xsl:when>
<xsl:otherwise/>
</xsl:choose>
</xsl:template>
<!--
This template will return the COAR Access Right Vocabulary URI
like http://purl.org/coar/access_right/c_abf2
@@ -1616,16 +1684,16 @@
</xsl:call-template>
</xsl:variable>
<xsl:choose>
<xsl:when test="$lc_value = 'open access'">
<xsl:when test="$lc_value = 'open access' or $lc_value = 'open.access'">
<xsl:text>http://purl.org/coar/access_right/c_abf2</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'embargoed access'">
<xsl:when test="$lc_value = 'embargoed access' or $lc_value = 'embargo'">
<xsl:text>http://purl.org/coar/access_right/c_f1cf</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'restricted access'">
<xsl:when test="$lc_value = 'restricted access' or $lc_value = 'restricted'">
<xsl:text>http://purl.org/coar/access_right/c_16ec</xsl:text>
</xsl:when>
<xsl:when test="$lc_value = 'metadata only access'">
<xsl:when test="$lc_value = 'metadata only access' or $lc_value = 'metadata.only'">
<xsl:text>http://purl.org/coar/access_right/c_14cb</xsl:text>
</xsl:when>
<xsl:otherwise/>

View File

@@ -79,7 +79,6 @@
<heading></heading>
<processing-class>org.dspace.app.rest.submit.step.CollectionStep</processing-class>
<type>collection</type>
<scope visibility="hidden" visibilityOutside="hidden">submission</scope>
</step-definition>
<!-- The following set of DescribeStep <step-definition>s all point to forms (of the same name) which are

View File

@@ -25,6 +25,10 @@ rest.projections.full.max = 2
# This property determines the max embed depth for a SpecificLevelProjection
rest.projection.specificLevel.maxEmbed = 5
# This property determines the maximum number of REST patch operations that can be performed in a single request,
# for example when batch removing bitstreams. The default value is 1000.
rest.patch.operations.limit = 1000
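# For example, to permit larger batch operations, raise the limit (the value below is
# illustrative only, not a recommended setting):
# rest.patch.operations.limit = 5000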
# Define which configuration properties are exposed through the http://<dspace.server.url>/api/config/properties/
# rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will
# respond that the property wasn't found. This property can be defined multiple times to allow access to multiple

View File

@@ -30,6 +30,7 @@
<entry key-ref="crossref.volume" value-ref="crossrefVolume" />
<entry key-ref="crossref.issue" value-ref="crossrefIssue" />
<entry key-ref="crossref.abstract" value-ref="crossrefAbstract" />
<entry key-ref="crossref.publisher" value-ref="crossrefPublisher" />
</util:map>
<bean id="crossrefIDContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleJsonPathMetadataContributor">
@@ -137,6 +138,14 @@
<constructor-arg value="dc.description.abstract"/>
</bean>
<bean id="crossrefPublisher" class="org.dspace.importer.external.metadatamapping.contributor.SimpleJsonPathMetadataContributor">
<property name="field" ref="crossref.publisher"/>
<property name="query" value="/publisher"/>
</bean>
<bean id="crossref.publisher" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.publisher"/>
</bean>
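<!-- A worked example (hypothetical Crossref response fragment): for a record containing
"publisher": "Example University Press", the "/publisher" query above maps that value
to dc.publisher. -->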
<bean class="java.lang.Integer" id="maxRetry">
<constructor-arg value="3"/>
</bean>

View File

@@ -57,9 +57,6 @@
<!-- Same as the "default" configuration, but does NOT filter out older versions of items -->
<!-- Used to display related items on single-item pages, because a relationship does not always point to the latest version of the related item -->
<entry key="default-relationships" value-ref="defaultRelationshipsConfiguration" />
<!--Use site to override the default configuration for the home page & default discovery page-->
<entry key="site" value-ref="homepageConfiguration" />
<!--<entry key="123456789/7621" value-ref="defaultConfiguration"/>-->
<!-- Used to show filters and results on MyDSpace -->
<!-- Do not change the id of special entries or else they won't work -->
@@ -725,121 +722,6 @@
<property name="spellCheckEnabled" value="true"/>
</bean>
<!--The Homepage specific configuration settings for discovery-->
<bean id="homepageConfiguration" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<!--Which sidebar facets are to be displayed (same as defaultConfiguration above)-->
<property name="sidebarFacets">
<list>
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterEntityType"/>
</list>
</property>
<!-- Set TagCloud configuration per discovery configuration -->
<property name="tagCloudFacetConfiguration" ref="homepageTagCloudFacetConfiguration"/>
<!--The search filters which can be used on the discovery search page (same as defaultConfiguration above)-->
<property name="searchFilters">
<list>
<ref bean="searchFilterTitle" />
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterFileNameInOriginalBundle" />
<ref bean="searchFilterFileDescriptionInOriginalBundle" />
<ref bean="searchFilterEntityType"/>
<ref bean="searchFilterIsAuthorOfPublicationRelation"/>
<ref bean="searchFilterIsProjectOfPublicationRelation"/>
<ref bean="searchFilterIsOrgUnitOfPublicationRelation"/>
<ref bean="searchFilterIsPublicationOfJournalIssueRelation"/>
<ref bean="searchFilterIsJournalOfPublicationRelation"/>
</list>
</property>
<!--The sort filters for the discovery search (same as defaultConfiguration above)-->
<property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<property name="sortFields">
<list>
<ref bean="sortScore" />
<ref bean="sortTitle" />
<ref bean="sortDateIssued" />
<ref bean="sortDateAccessioned" />
</list>
</property>
</bean>
</property>
<!--Any default filter queries, these filter queries will be used for all
queries done by discovery for this configuration-->
<property name="defaultFilterQueries">
<list>
<!--Only find items, communities and collections-->
<value>(search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community</value>
<value>-withdrawn:true AND -discoverable:false</value>
</list>
</property>
<!-- Limit recent submissions on homepage to only 5 (default is 20) -->
<property name="recentSubmissionConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryRecentSubmissionsConfiguration">
<property name="metadataSortField" value="dc.date.accessioned" />
<property name="type" value="date"/>
<property name="max" value="5"/>
<property name="useAsHomePage" value="false"/>
</bean>
</property>
<property name="hitHighlightingConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightingConfiguration">
<property name="metadataFields">
<list>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.contributor.author"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dspace.entity.type"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="person.identifier.jobtitle"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.description.abstract"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="2"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.title"/>
<property name="snippets" value="5"/>
</bean>
<!-- By default, full text snippets are disabled, as snippets of embargoed/restricted bitstreams
may appear in search results when the Item is public. See DS-3498
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.status"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.description"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="5"/>
</bean>
-->
</list>
</property>
</bean>
</property>
<!-- When true a "did you mean" example will be displayed, value can be true or false -->
<property name="spellCheckEnabled" value="true"/>
</bean>
<!--The workspace configuration settings for discovery -->
<bean id="workspaceConfiguration"
@@ -865,8 +747,11 @@
<!--The sort filters for the discovery search-->
<property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<!--The default sort filter to use for the initial workspace loading-->
<property name="defaultSortField" ref="sortLastModified" />
<property name="sortFields">
<list>
<ref bean="sortLastModified" />
<ref bean="sortScore" />
<ref bean="sortTitle" />
<ref bean="sortDateIssued" />
@@ -938,6 +823,8 @@
<!--The sort filters for the discovery search-->
<property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<!--The default sort filter to use for the initial workspace loading-->
<property name="defaultSortField" ref="sortLastModified" />
<property name="sortFields">
<list>
<ref bean="sortLastModified" />
@@ -1015,6 +902,7 @@
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<property name="sortFields">
<list>
<ref bean="sortLastModified" />
<ref bean="sortScore" />
<ref bean="sortTitle" />
<ref bean="sortDateIssued" />
@@ -1282,7 +1170,7 @@
</bean>
<bean id="publicationRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="publication"/>
<property name="id" value="publicationRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -1415,7 +1303,7 @@
</bean>
<bean id="personRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="person"/>
<property name="id" value="personRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -1532,7 +1420,7 @@
</bean>
<bean id="projectRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="project"/>
<property name="id" value="projectRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -1652,7 +1540,7 @@
<bean id="orgUnitRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype">
<property name="id" value="orgUnit"/>
<property name="id" value="orgUnitRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -1777,7 +1665,7 @@
<bean id="journalIssueRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype">
<property name="id" value="journalIssue"/>
<property name="id" value="journalIssueRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -1897,7 +1785,7 @@
<bean id="journalVolumeRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype">
<property name="id" value="journalVolume"/>
<property name="id" value="journalVolumeRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -2016,7 +1904,7 @@
<bean id="journalRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype">
<property name="id" value="journal"/>
<property name="id" value="journalRelationships"/>
<property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets">
@@ -2232,18 +2120,6 @@
</bean>
<!--TagCloud configuration bean for homepage discovery configuration-->
<bean id="homepageTagCloudFacetConfiguration" class="org.dspace.discovery.configuration.TagCloudFacetConfiguration">
<!-- Actual configuration of the tagcloud (colors, sorting, etc.) -->
<property name="tagCloudConfiguration" ref="tagCloudConfiguration"/>
<!-- List of tagclouds to appear, one for every search filter, one after the other -->
<property name="tagCloudFacets">
<list>
<ref bean="searchFilterSubject" />
</list>
</property>
</bean>
<!--TagCloud configuration bean for default discovery configuration-->
<bean id="defaultTagCloudFacetConfiguration" class="org.dspace.discovery.configuration.TagCloudFacetConfiguration">
<!-- Actual configuration of the tagcloud (colors, sorting, etc.) -->

View File

@@ -21,4 +21,8 @@
<!-- Additional item.compile plugin to enrich field with information about
Creative Commons License metadata -->
<bean class="org.dspace.xoai.app.CCElementItemCompilePlugin"/>
<!-- Additional item.compile plugin to enrich field with information about
Access Status metadata -->
<bean class="org.dspace.xoai.app.plugins.AccessStatusElementItemCompilePlugin"/>
</beans>

View File

@@ -283,6 +283,7 @@
<!-- used by the DSpace Discovery Solr Indexer to track the last time a document was indexed -->
<field name="SolrIndexer.lastIndexed" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" />
<field name="lastModified" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" />
<copyField source="lastModified" dest="lastModified_dt" />
<!-- used to filter out items that are older versions of another item -->
<field name="latestVersion" type="boolean" indexed="true" stored="true" default="true" multiValued="false" omitNorms="true"/>