Merge remote-tracking branch 'origin/main' into duracom-136_CST-9634

# Conflicts:
#	dspace/config/spring/api/discovery.xml
This commit is contained in:
eskander
2023-06-06 17:30:53 +03:00
60 changed files with 3979 additions and 406 deletions

View File

@@ -23,6 +23,7 @@ import java.util.UUID;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.tika.Tika;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
protected boolean zip = false; protected boolean zip = false;
protected boolean remoteUrl = false; protected boolean remoteUrl = false;
protected String zipfilename = null; protected String zipfilename = null;
protected boolean zipvalid = false;
protected boolean help = false; protected boolean help = false;
protected File workDir = null; protected File workDir = null;
protected File workFile = null; protected File workFile = null;
@@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
handler.logInfo("***End of Test Run***"); handler.logInfo("***End of Test Run***");
} }
} finally { } finally {
// clean work dir
if (zip) { if (zip) {
FileUtils.deleteDirectory(new File(sourcedir)); // if zip file was valid then clean sourcedir
FileUtils.deleteDirectory(workDir); if (zipvalid && sourcedir != null && new File(sourcedir).exists()) {
if (remoteUrl && workFile != null && workFile.exists()) { FileUtils.deleteDirectory(new File(sourcedir));
}
// clean workdir
if (workDir != null && workDir.exists()) {
FileUtils.deleteDirectory(workDir);
}
// conditionally clean workFile if import was done in the UI or via a URL and it still exists
if (workFile != null && workFile.exists()) {
workFile.delete(); workFile.delete();
} }
} }
@@ -329,7 +339,14 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
// manage zip via remote url // manage zip via remote url
optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
} }
if (optionalFileStream.isPresent()) { if (optionalFileStream.isPresent()) {
// validate zip file
Optional<InputStream> validationFileStream = handler.getFileStream(context, zipfilename);
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID()); + zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
@@ -337,10 +354,32 @@ public class ItemImport extends DSpaceRunnable<ItemImportScriptConfiguration> {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename); "Error reading file, the file couldn't be found for filename: " + zipfilename);
} }
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} }
/**
* Confirm that the zip file has the correct MIME type
* @param inputStream
*/
protected void validateZip(InputStream inputStream) {
Tika tika = new Tika();
try {
String mimeType = tika.detect(inputStream);
if (mimeType.equals("application/zip")) {
zipvalid = true;
} else {
handler.logError("A valid zip file must be supplied. The provided file has mimetype: " + mimeType);
throw new UnsupportedOperationException("A valid zip file must be supplied");
}
} catch (IOException e) {
throw new IllegalArgumentException(
"There was an error while reading the zip file: " + zipfilename);
}
}
/** /**
* Read the mapfile * Read the mapfile
* @param context * @param context

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemimport; package org.dspace.app.itemimport;
import java.io.File; import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.net.URL; import java.net.URL;
import java.sql.SQLException; import java.sql.SQLException;
@@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport {
// If this is a zip archive, unzip it first // If this is a zip archive, unzip it first
if (zip) { if (zip) {
if (!remoteUrl) { if (!remoteUrl) {
// confirm zip file exists
File myZipFile = new File(sourcedir + File.separator + zipfilename);
if ((!myZipFile.exists()) || (!myZipFile.isFile())) {
throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename);
}
// validate zip file
InputStream validationFileStream = new FileInputStream(myZipFile);
validateZip(validationFileStream);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID()); + File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip( sourcedir = itemImportService.unzip(
@@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport {
// manage zip via remote url // manage zip via remote url
Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); Optional<InputStream> optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (optionalFileStream.isPresent()) { if (optionalFileStream.isPresent()) {
// validate zip file via url
Optional<InputStream> validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream());
if (validationFileStream.isPresent()) {
validateZip(validationFileStream.get());
}
workFile = new File(itemImportService.getTempWorkDir() + File.separator workFile = new File(itemImportService.getTempWorkDir() + File.separator
+ zipfilename + "-" + context.getCurrentUser().getID()); + zipfilename + "-" + context.getCurrentUser().getID());
FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile);
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR
+ File.separator + context.getCurrentUser().getID());
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} else { } else {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"Error reading file, the file couldn't be found for filename: " + zipfilename); "Error reading file, the file couldn't be found for filename: " + zipfilename);
} }
workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR);
sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath());
} }
} }
} }

View File

@@ -22,11 +22,13 @@ import org.dspace.sort.SortOption;
* This class holds all the information about a specifically configured * This class holds all the information about a specifically configured
* BrowseIndex. It is responsible for parsing the configuration, understanding * BrowseIndex. It is responsible for parsing the configuration, understanding
* about what sort options are available, and what the names of the database * about what sort options are available, and what the names of the database
* tables that hold all the information are actually called. * tables that hold all the information are actually called. Hierarchical browse
* indexes also contain information about the vocabulary they're using, see:
* {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex}
* *
* @author Richard Jones * @author Richard Jones
*/ */
public final class BrowseIndex { public class BrowseIndex {
/** the configuration number, as specified in the config */ /** the configuration number, as specified in the config */
/** /**
* used for single metadata browse tables for generating the table name * used for single metadata browse tables for generating the table name
@@ -102,7 +104,7 @@ public final class BrowseIndex {
* *
* @param baseName The base of the table name * @param baseName The base of the table name
*/ */
private BrowseIndex(String baseName) { protected BrowseIndex(String baseName) {
try { try {
number = -1; number = -1;
tableBaseName = baseName; tableBaseName = baseName;

View File

@@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO {
} }
private void addDefaultFilterQueries(DiscoverQuery query) { private void addDefaultFilterQueries(DiscoverQuery query) {
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container);
discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries);
} }

View File

@@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.HarvestedItem;
import org.dspace.harvest.service.HarvestedItemService; import org.dspace.harvest.service.HarvestedItemService;
import org.dspace.identifier.DOI;
import org.dspace.identifier.IdentifierException; import org.dspace.identifier.IdentifierException;
import org.dspace.identifier.service.DOIService;
import org.dspace.identifier.service.IdentifierService; import org.dspace.identifier.service.IdentifierService;
import org.dspace.orcid.OrcidHistory; import org.dspace.orcid.OrcidHistory;
import org.dspace.orcid.OrcidQueue; import org.dspace.orcid.OrcidQueue;
@@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true) @Autowired(required = true)
protected IdentifierService identifierService; protected IdentifierService identifierService;
@Autowired(required = true) @Autowired(required = true)
protected DOIService doiService;
@Autowired(required = true)
protected VersioningService versioningService; protected VersioningService versioningService;
@Autowired(required = true) @Autowired(required = true)
protected HarvestedItemService harvestedItemService; protected HarvestedItemService harvestedItemService;
@@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
// Remove any Handle // Remove any Handle
handleService.unbindHandle(context, item); handleService.unbindHandle(context, item);
// Delete a DOI if linked to the item.
// If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid,
// hibernate will throw a foreign constraint exception.
// Here we use the DOI service directly as it is able to manage DOIs even without any configured
// consumer or provider.
DOI doi = doiService.findDOIByDSpaceObject(context, item);
if (doi != null) {
doi.setDSpaceObject(null);
}
// remove version attached to the item // remove version attached to the item
removeVersion(context, item); removeVersion(context, item);

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
@@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.core.service.PluginService; import org.dspace.core.service.PluginService;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
protected Map<String, Map<String, List<String>>> authoritiesFormDefinitions = protected Map<String, Map<String, List<String>>> authoritiesFormDefinitions =
new HashMap<String, Map<String, List<String>>>(); new HashMap<String, Map<String, List<String>>>();
// Map of vocabulary authorities to and their index info equivalent
protected Map<String, DSpaceControlledVocabularyIndex> vocabularyIndexMap = new HashMap<>();
// the item submission reader // the item submission reader
private SubmissionConfigReader itemSubmissionConfigReader; private SubmissionConfigReader itemSubmissionConfigReader;
@@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
protected ConfigurationService configurationService; protected ConfigurationService configurationService;
@Autowired(required = true) @Autowired(required = true)
protected PluginService pluginService; protected PluginService pluginService;
@Autowired
private DiscoveryConfigurationService searchConfigurationService;
final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin.";
final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation.";
@@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService
HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName);
return ma.getParentChoice(authorityName, vocabularyId, locale); return ma.getParentChoice(authorityName, vocabularyId, locale);
} }
@Override
public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) {
if (this.vocabularyIndexMap.containsKey(nameVocab)) {
return this.vocabularyIndexMap.get(nameVocab);
} else {
init();
ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab);
if (source != null && source instanceof DSpaceControlledVocabulary) {
Set<String> metadataFields = new HashSet<>();
Map<String, List<String>> formsToFields = this.authoritiesFormDefinitions.get(nameVocab);
for (Map.Entry<String, List<String>> formToField : formsToFields.entrySet()) {
metadataFields.addAll(formToField.getValue().stream().map(value ->
StringUtils.replace(value, "_", "."))
.collect(Collectors.toList()));
}
DiscoverySearchFilterFacet matchingFacet = null;
for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) {
boolean coversAllFieldsFromVocab = true;
for (String fieldFromVocab: metadataFields) {
boolean coversFieldFromVocab = false;
for (String facetMdField: facetConfig.getMetadataFields()) {
if (facetMdField.startsWith(fieldFromVocab)) {
coversFieldFromVocab = true;
break;
}
}
if (!coversFieldFromVocab) {
coversAllFieldsFromVocab = false;
break;
}
}
if (coversAllFieldsFromVocab) {
matchingFacet = facetConfig;
break;
}
}
DSpaceControlledVocabularyIndex vocabularyIndex =
new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields,
matchingFacet);
this.vocabularyIndexMap.put(nameVocab, vocabularyIndex);
return vocabularyIndex;
}
return null;
}
}
} }

View File

@@ -0,0 +1,47 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.util.Set;
import org.dspace.browse.BrowseIndex;
import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
/**
* Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a
* {@code BrowseIndexRest}
* cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)}
*
* @author Marie Verdonck (Atmire) on 04/05/2023
*/
public class DSpaceControlledVocabularyIndex extends BrowseIndex {
protected DSpaceControlledVocabulary vocabulary;
protected Set<String> metadataFields;
protected DiscoverySearchFilterFacet facetConfig;
public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set<String> metadataFields,
DiscoverySearchFilterFacet facetConfig) {
super(controlledVocabulary.vocabularyName);
this.vocabulary = controlledVocabulary;
this.metadataFields = metadataFields;
this.facetConfig = facetConfig;
}
public DSpaceControlledVocabulary getVocabulary() {
return vocabulary;
}
public Set<String> getMetadataFields() {
return this.metadataFields;
}
public DiscoverySearchFilterFacet getFacetConfig() {
return this.facetConfig;
}
}

View File

@@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choice; import org.dspace.content.authority.Choice;
import org.dspace.content.authority.ChoiceAuthority; import org.dspace.content.authority.ChoiceAuthority;
import org.dspace.content.authority.Choices; import org.dspace.content.authority.Choices;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
/** /**
* Broker for ChoiceAuthority plugins, and for other information configured * Broker for ChoiceAuthority plugins, and for other information configured
@@ -220,4 +221,7 @@ public interface ChoiceAuthorityService {
* @return the parent Choice object if any * @return the parent Choice object if any
*/ */
public Choice getParentChoice(String authorityName, String vocabularyId, String locale); public Choice getParentChoice(String authorityName, String vocabularyId, String locale);
public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab);
} }

View File

@@ -18,6 +18,9 @@ import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject; import org.dspace.content.DSpaceObject;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder; import org.dspace.discovery.utils.DiscoverQueryBuilder;
@@ -73,35 +76,80 @@ public class SearchUtils {
searchService = null; searchService = null;
} }
/**
* Retrieves the Discovery Configuration for a null context, prefix and DSpace object.
* This will result in returning the default configuration
* @return the default configuration
*/
public static DiscoveryConfiguration getDiscoveryConfiguration() { public static DiscoveryConfiguration getDiscoveryConfiguration() {
return getDiscoveryConfiguration(null, null); return getDiscoveryConfiguration(null, null, null);
} }
public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { /**
return getDiscoveryConfiguration(null, dso); * Retrieves the Discovery Configuration with a null prefix for a DSpace object.
* @param context
* the dabase context
* @param dso
* the DSpace object
* @return the Discovery Configuration for the specified DSpace object
*/
public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) {
return getDiscoveryConfiguration(context, null, dso);
} }
/** /**
* Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A
* null prefix mean the normal query, other predefined values are workspace or workflow * null prefix mean the normal query, other predefined values are workspace or workflow
* *
*
* @param context
* the database context
* @param prefix * @param prefix
* the namespace of the configuration to lookup if any * the namespace of the configuration to lookup if any
* @param dso * @param dso
* the DSpaceObject * the DSpaceObject
* @return the discovery configuration for the specified scope * @return the discovery configuration for the specified scope
*/ */
public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix,
DSpaceObject dso) {
if (prefix != null) { if (prefix != null) {
return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix);
} else { } else {
return getDiscoveryConfigurationByName(dso != null ? dso.getHandle() : null); return getDiscoveryConfigurationByDSO(context, dso);
} }
} }
/**
* Retrieve the configuration for the current dspace object and all its parents and add it to the provided set
* @param context - The database context
* @param configurations - The set of configurations to add the retrieved configurations to
* @param prefix - The namespace of the configuration to lookup if any
* @param dso - The DSpace Object
* @return the set of configurations with additional retrieved ones for the dspace object and parents
* @throws SQLException
*/
public static Set<DiscoveryConfiguration> addDiscoveryConfigurationForParents(
Context context, Set<DiscoveryConfiguration> configurations, String prefix, DSpaceObject dso)
throws SQLException {
if (dso == null) {
configurations.add(getDiscoveryConfigurationByName(null));
return configurations;
}
if (prefix != null) {
configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle()));
} else {
configurations.add(getDiscoveryConfigurationByName(dso.getHandle()));
}
DSpaceObjectService<DSpaceObject> dSpaceObjectService = ContentServiceFactory.getInstance()
.getDSpaceObjectService(dso);
DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso);
return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject);
}
/** /**
* Return the discovery configuration identified by the specified name * Return the discovery configuration identified by the specified name
* *
* @param configurationName the configuration name assigned to the bean in the * @param configurationName the configuration name assigned to the bean in the
* discovery.xml * discovery.xml
* @return the discovery configuration * @return the discovery configuration
@@ -113,6 +161,18 @@ public class SearchUtils {
return configurationService.getDiscoveryConfiguration(configurationName); return configurationService.getDiscoveryConfiguration(configurationName);
} }
/**
* Return the discovery configuration for the provided DSO
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO
*/
public static DiscoveryConfiguration getDiscoveryConfigurationByDSO(
Context context, DSpaceObject dso) {
DiscoveryConfigurationService configurationService = getConfigurationService();
return configurationService.getDiscoveryDSOConfiguration(context, dso);
}
public static DiscoveryConfigurationService getConfigurationService() { public static DiscoveryConfigurationService getConfigurationService() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager return manager
@@ -127,47 +187,55 @@ public class SearchUtils {
* Method that retrieves a list of all the configuration objects from the given item * Method that retrieves a list of all the configuration objects from the given item
* A configuration object can be returned for each parent community/collection * A configuration object can be returned for each parent community/collection
* *
* @param context the database context
* @param item the DSpace item * @param item the DSpace item
* @return a list of configuration objects * @return a list of configuration objects
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Item item) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(Context context, Item item)
throws SQLException {
List<Collection> collections = item.getCollections(); List<Collection> collections = item.getCollections();
return getAllDiscoveryConfigurations(null, collections, item); return getAllDiscoveryConfigurations(context, null, collections, item);
} }
/** /**
* Return all the discovery configuration applicable to the provided workspace item * Return all the discovery configuration applicable to the provided workspace item
*
* @param context
* @param witem a workspace item * @param witem a workspace item
* @return a list of discovery configuration * @return a list of discovery configuration
* @throws SQLException * @throws SQLException
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkspaceItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>(); List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection()); collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem());
} }
/** /**
* Return all the discovery configuration applicable to the provided workflow item * Return all the discovery configuration applicable to the provided workflow item
*
* @param context
* @param witem a workflow item * @param witem a workflow item
* @return a list of discovery configuration * @return a list of discovery configuration
* @throws SQLException * @throws SQLException
*/ */
public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { public static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
WorkflowItem witem) throws SQLException {
List<Collection> collections = new ArrayList<Collection>(); List<Collection> collections = new ArrayList<Collection>();
collections.add(witem.getCollection()); collections.add(witem.getCollection());
return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem());
} }
private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(String prefix, private static List<DiscoveryConfiguration> getAllDiscoveryConfigurations(final Context context,
String prefix,
List<Collection> collections, Item item) List<Collection> collections, Item item)
throws SQLException { throws SQLException {
Set<DiscoveryConfiguration> result = new HashSet<>(); Set<DiscoveryConfiguration> result = new HashSet<>();
for (Collection collection : collections) { for (Collection collection : collections) {
DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); addDiscoveryConfigurationForParents(context, result, prefix, collection);
result.add(configuration);
} }
//Add alwaysIndex configurations //Add alwaysIndex configurations

View File

@@ -53,10 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
if (bitstreams != null) { if (bitstreams != null) {
for (Bitstream bitstream : bitstreams) { for (Bitstream bitstream : bitstreams) {
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName());
// Add _keyword and _filter fields which are necessary to support filtering and faceting
// for the file names
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName());
document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName());
String description = bitstream.getDescription(); String description = bitstream.getDescription();
if ((description != null) && !description.isEmpty()) { if ((description != null) && !description.isEmpty()) {
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description);
// Add _keyword and _filter fields which are necessary to support filtering and
// faceting for the descriptions
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword",
description);
document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter",
description);
} }
} }
} }
@@ -65,4 +75,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin {
} }
} }
} }
} }

View File

@@ -7,12 +7,23 @@
*/ */
package org.dspace.discovery.configuration; package org.dspace.discovery.configuration;
import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
@@ -22,9 +33,18 @@ import org.dspace.services.factory.DSpaceServicesFactory;
*/ */
public class DiscoveryConfigurationService { public class DiscoveryConfigurationService {
private static final Logger log = LogManager.getLogger();
private Map<String, DiscoveryConfiguration> map; private Map<String, DiscoveryConfiguration> map;
private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>(); private Map<Integer, List<String>> toIgnoreMetadataFields = new HashMap<>();
/**
* Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its
* own configuration, we take the one of the first parent that does.
* This cache ensures we do not have to go up the hierarchy every time.
*/
private final Map<UUID, DiscoveryConfiguration> comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>();
public Map<String, DiscoveryConfiguration> getMap() { public Map<String, DiscoveryConfiguration> getMap() {
return map; return map;
} }
@@ -41,25 +61,98 @@ public class DiscoveryConfigurationService {
this.toIgnoreMetadataFields = toIgnoreMetadataFields; this.toIgnoreMetadataFields = toIgnoreMetadataFields;
} }
public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { /**
* Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from
* the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will
* be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default
* configuration will be retrieved
*
* When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param indexableObject - The IndexableObject to retrieve the configuration for
* @return the discovery configuration for the provided IndexableObject.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) {
String name; String name;
if (dso == null) { if (indexableObject == null) {
name = "default"; return getDiscoveryConfiguration(null);
} else if (dso instanceof IndexableDSpaceObject) { } else if (indexableObject instanceof IndexableDSpaceObject) {
name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject());
} else { } else {
name = dso.getUniqueIndexID(); name = indexableObject.getUniqueIndexID();
} }
return getDiscoveryConfiguration(name); return getDiscoveryConfiguration(name);
} }
public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { /**
* Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will
* be checked until there is no parent left, in which case the "default" configuration will be returned.
* @param context - The database context
* @param dso - The DSpace object to retrieve the configuration for
* @return the discovery configuration for the provided DSO.
*/
public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) {
// Fall back to default configuration
if (dso == null) {
return getDiscoveryConfiguration(null, true);
}
// Attempt to retrieve cached configuration by UUID
if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) {
return comColToDiscoveryConfigurationMap.get(dso.getID());
}
DiscoveryConfiguration configuration;
// Attempt to retrieve configuration by DSO handle
configuration = getDiscoveryConfiguration(dso.getHandle(), false);
if (configuration == null) {
// Recurse up the Comm/Coll hierarchy until a configuration is found
DSpaceObjectService<DSpaceObject> dSpaceObjectService =
ContentServiceFactory.getInstance().getDSpaceObjectService(dso);
DSpaceObject parentObject = null;
try {
parentObject = dSpaceObjectService.getParentObject(context, dso);
} catch (SQLException e) {
log.error(e);
}
configuration = getDiscoveryDSOConfiguration(context, parentObject);
}
// Cache the resulting configuration when the DSO is a Community or Collection
if (dso instanceof Community || dso instanceof Collection) {
comColToDiscoveryConfigurationMap.put(dso.getID(), configuration);
}
return configuration;
}
/**
* Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the
* default configuration will be returned.
* @param name - The name of the configuration to be retrieved
* @return the Discovery Configuration for the provided name, or default when none was found.
*/
public DiscoveryConfiguration getDiscoveryConfiguration(String name) {
return getDiscoveryConfiguration(name, true);
}
/**
* Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration
* will be returned when no match is found. When useDefault is set to false, null will be returned when no match is
* found.
* @param name - The name of the configuration to retrieve
* @param useDefault - Whether the default configuration should be used when no match is found
* @return the configuration for the provided name
*/
public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) {
DiscoveryConfiguration result; DiscoveryConfiguration result;
result = StringUtils.isBlank(name) ? null : getMap().get(name); result = StringUtils.isBlank(name) ? null : getMap().get(name);
if (result == null) { if (result == null && useDefault) {
//No specific configuration, get the default one //No specific configuration, get the default one
result = getMap().get("default"); result = getMap().get("default");
} }
@@ -67,12 +160,23 @@ public class DiscoveryConfigurationService {
return result; return result;
} }
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, /**
final IndexableObject dso) { * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be
* checked for the provided name. When no match is found for the name, the configuration will be retrieved for the
* IndexableObject
*
* @param context - The database context
* @param configurationName - The name of the configuration to be retrieved
* @param indexableObject - The indexable object to retrieve the configuration for
* @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject
*/
public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context,
String configurationName,
IndexableObject indexableObject) {
if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) {
return getMap().get(configurationName); return getMap().get(configurationName);
} else { } else {
return getDiscoveryConfiguration(dso); return getDiscoveryConfiguration(context, indexableObject);
} }
} }
@@ -92,13 +196,25 @@ public class DiscoveryConfigurationService {
return configs; return configs;
} }
/**
* @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet}
*/
public List<DiscoverySearchFilterFacet> getAllFacetsConfig() {
List<DiscoverySearchFilterFacet> configs = new ArrayList<>();
for (String key : map.keySet()) {
DiscoveryConfiguration config = map.get(key);
configs.addAll(config.getSidebarFacets());
}
return configs;
}
public static void main(String[] args) { public static void main(String[] args) {
System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size());
DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName( .getServiceByName(
DiscoveryConfigurationService.class DiscoveryConfigurationService.class
.getName(), .getName(),
DiscoveryConfigurationService.class); DiscoveryConfigurationService.class);
for (String key : mainService.getMap().keySet()) { for (String key : mainService.getMap().keySet()) {
System.out.println(key); System.out.println(key);
@@ -126,7 +242,7 @@ public class DiscoveryConfigurationService {
System.out.println("Recent submissions configuration:"); System.out.println("Recent submissions configuration:");
DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration
.getRecentSubmissionConfiguration(); .getRecentSubmissionConfiguration();
System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField());
System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax()); System.out.println("\tMax recent submissions: " + recentSubmissionConfiguration.getMax());

View File

@@ -9,6 +9,7 @@ package org.dspace.discovery.configuration;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import javax.annotation.Nullable;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
@@ -22,6 +23,11 @@ public class DiscoverySortConfiguration {
private List<DiscoverySortFieldConfiguration> sortFields = new ArrayList<DiscoverySortFieldConfiguration>(); private List<DiscoverySortFieldConfiguration> sortFields = new ArrayList<DiscoverySortFieldConfiguration>();
/**
* Default sort configuration to use when needed
*/
@Nullable private DiscoverySortFieldConfiguration defaultSortField;
public List<DiscoverySortFieldConfiguration> getSortFields() { public List<DiscoverySortFieldConfiguration> getSortFields() {
return sortFields; return sortFields;
} }
@@ -30,6 +36,14 @@ public class DiscoverySortConfiguration {
this.sortFields = sortFields; this.sortFields = sortFields;
} }
public DiscoverySortFieldConfiguration getDefaultSortField() {
return defaultSortField;
}
public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) {
this.defaultSortField = configuration;
}
public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) {
if (StringUtils.isBlank(sortField)) { if (StringUtils.isBlank(sortField)) {
return null; return null;

View File

@@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
final Collection collection = indexableCollection.getIndexedObject(); final Collection collection = indexableCollection.getIndexedObject();
// Retrieve configuration // Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(collection); DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, collection);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration(); .getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>(); List<String> highlightedMetadataFields = new ArrayList<>();
@@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Ind
return locations; return locations;
} }
} }

View File

@@ -69,7 +69,7 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
final Community community = indexableObject.getIndexedObject(); final Community community = indexableObject.getIndexedObject();
// Retrieve configuration // Retrieve configuration
DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(community); DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, community);
DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration DiscoveryHitHighlightingConfiguration highlightingConfiguration = discoveryConfiguration
.getHitHighlightingConfiguration(); .getHitHighlightingConfiguration();
List<String> highlightedMetadataFields = new ArrayList<>(); List<String> highlightedMetadataFields = new ArrayList<>();
@@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Inde
return locations; return locations;
} }
} }

View File

@@ -80,11 +80,13 @@ public abstract class InprogressSubmissionIndexFactoryImpl
// Add item metadata // Add item metadata
List<DiscoveryConfiguration> discoveryConfigurations; List<DiscoveryConfiguration> discoveryConfigurations;
if (inProgressSubmission instanceof WorkflowItem) { if (inProgressSubmission instanceof WorkflowItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkflowItem) inProgressSubmission);
} else if (inProgressSubmission instanceof WorkspaceItem) { } else if (inProgressSubmission instanceof WorkspaceItem) {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context,
(WorkspaceItem) inProgressSubmission);
} else { } else {
discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
} }
indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations);
indexableCollectionService.storeCommunityCollectionLocations(doc, locations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations);

View File

@@ -160,7 +160,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
} }
// Add the item metadata // Add the item metadata
List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); List<DiscoveryConfiguration> discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item);
addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations);
//mandatory facet to show status on mydspace //mandatory facet to show status on mydspace

View File

@@ -332,7 +332,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
} }
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (searchSortConfiguration.getDefaultSortField() != null) {
sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
} }
@@ -342,7 +344,9 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) {
String sortBy;// Attempt to find the default one, if none found we use SCORE String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score"; sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (searchSortConfiguration.getDefaultSortField() != null) {
sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField();
} else if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) { if (StringUtils.isBlank(defaultSort.getMetadataField())) {

View File

@@ -15,8 +15,8 @@ import java.util.Date;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.DCDate;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO; import org.dspace.importer.external.metadatamapping.MetadatumDTO;
@@ -107,28 +107,30 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
LinkedList<MetadatumDTO> dayList = (LinkedList<MetadatumDTO>) day.contributeMetadata(t); LinkedList<MetadatumDTO> dayList = (LinkedList<MetadatumDTO>) day.contributeMetadata(t);
for (int i = 0; i < yearList.size(); i++) { for (int i = 0; i < yearList.size(); i++) {
DCDate dcDate = null; String resultDateString = "";
String dateString = ""; String dateString = "";
SimpleDateFormat resultFormatter = null;
if (monthList.size() > i && dayList.size() > i) { if (monthList.size() > i && dayList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() +
"-" + dayList.get(i).getValue(); "-" + dayList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM-dd");
} else if (monthList.size() > i) { } else if (monthList.size() > i) {
dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy-MM");
} else { } else {
dateString = yearList.get(i).getValue(); dateString = yearList.get(i).getValue();
resultFormatter = new SimpleDateFormat("yyyy");
} }
int j = 0; int j = 0;
// Use the first dcDate that has been formatted (Config should go from most specific to most lenient) // Use the first dcDate that has been formatted (Config should go from most specific to most lenient)
while (j < dateFormatsToAttempt.size()) { while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) {
String dateFormat = dateFormatsToAttempt.get(j); String dateFormat = dateFormatsToAttempt.get(j);
try { try {
SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
Date date = formatter.parse(dateString); Date date = formatter.parse(dateString);
dcDate = new DCDate(date); resultDateString = resultFormatter.format(date);
values.add(metadataFieldMapping.toDCValue(field, formatter.format(date)));
break;
} catch (ParseException e) { } catch (ParseException e) {
// Multiple dateformats can be configured, we don't want to print the entire stacktrace every // Multiple dateformats can be configured, we don't want to print the entire stacktrace every
// time one of those formats fails. // time one of those formats fails.
@@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor<T> implements MetadataContributor<T>
} }
j++; j++;
} }
if (dcDate == null) { if (StringUtils.isNotBlank(resultDateString)) {
values.add(metadataFieldMapping.toDCValue(field, resultDateString));
} else {
log.info( log.info(
"Failed parsing " + dateString + ", check " + "Failed parsing " + dateString + ", check " +
"the configured dataformats in config/spring/api/pubmed-integration.xml"); "the configured dataformats in config/spring/api/pubmed-integration.xml");

View File

@@ -122,3 +122,5 @@ org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eper
org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided
org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks
org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long! org.dspace.app.rest.exception.PasswordNotValidException.message = New password is invalid. Valid passwords must be at least 8 characters long!
org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \
the repository

View File

@@ -8,6 +8,7 @@
package org.dspace.app.itemimport; package org.dspace.app.itemimport;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File; import java.io.File;
import java.nio.file.Files; import java.nio.file.Files;
@@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService; import org.dspace.content.service.RelationshipService;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.flywaydb.core.internal.util.ExceptionUtils;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@@ -46,6 +48,7 @@ import org.junit.Test;
public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private static final String ZIP_NAME = "saf.zip"; private static final String ZIP_NAME = "saf.zip";
private static final String PDF_NAME = "test.pdf";
private static final String publicationTitle = "A Tale of Two Cities"; private static final String publicationTitle = "A Tale of Two Cities";
private static final String personTitle = "Person Test"; private static final String personTitle = "Person Test";
@@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
private Collection collection; private Collection collection;
private Path tempDir; private Path tempDir;
private Path workDir; private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before @Before
@Override @Override
@@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkMetadata(); checkMetadata();
checkMetadataWithAnotherSchema(); checkMetadataWithAnotherSchema();
checkBitstream(); checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
} }
@Test @Test
@@ -254,6 +262,23 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase {
checkRelationship(); checkRelationship();
} }
@Test
public void importItemByZipSafInvalidMimetype() throws Exception {
// use sample PDF file
Files.copy(getClass().getResourceAsStream("test.pdf"),
Path.of(tempDir.toString() + "/" + PDF_NAME));
String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(),
"-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString()
+ "/mapfile.out" };
try {
perfomImportScript(args);
} catch (Exception e) {
// should throw an exception due to invalid mimetype
assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass());
}
}
@Test @Test
public void resumeImportItemBySafWithMetadataOnly() throws Exception { public void resumeImportItemBySafWithMetadataOnly() throws Exception {
// create simple SAF // create simple SAF

View File

@@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
private Community community; private Community community;
protected CommunityBuilder(Context context) { protected CommunityBuilder(Context context) {
super(context); super(context);
} }
public static CommunityBuilder createCommunity(final Context context) { public static CommunityBuilder createCommunity(final Context context) {
CommunityBuilder builder = new CommunityBuilder(context); CommunityBuilder builder = new CommunityBuilder(context);
return builder.create(); return builder.create(null);
}
public static CommunityBuilder createCommunity(final Context context, String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.create(handle);
} }
private CommunityBuilder create() { private CommunityBuilder create(String handle) {
return createSubCommunity(context, null); return createSubCommunity(context, null, handle);
} }
public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { public static CommunityBuilder createSubCommunity(final Context context, final Community parent) {
CommunityBuilder builder = new CommunityBuilder(context); CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent); return builder.createSub(parent, null);
} }
private CommunityBuilder createSub(final Community parent) { public static CommunityBuilder createSubCommunity(final Context context, final Community parent,
final String handle) {
CommunityBuilder builder = new CommunityBuilder(context);
return builder.createSub(parent, handle);
}
private CommunityBuilder createSub(final Community parent, String handle) {
try { try {
community = communityService.create(parent, context); community = communityService.create(parent, context, handle);
} catch (Exception e) { } catch (Exception e) {
e.printStackTrace(); e.printStackTrace();
return null; return null;
@@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder<Community> {
@Override @Override
public Community build() { public Community build() {
try { try {
communityService.update(context, community); communityService.update(context, community);
context.dispatchEvents(); context.dispatchEvents();

View File

@@ -725,9 +725,6 @@ public class CollectionTest extends AbstractDSpaceObjectTest {
// Allow Item REMOVE perms // Allow Item REMOVE perms
doNothing().when(authorizeServiceSpy) doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE));
// Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion)
doNothing().when(authorizeServiceSpy)
.authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE));
// create & add item first // create & add item first
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();

View File

@@ -1189,8 +1189,6 @@ public class ItemTest extends AbstractDSpaceObjectTest {
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true);
// Allow Item DELETE perms // Allow Item DELETE perms
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE);
// Allow Item WRITE perms (required to first delete identifiers)
doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE);
UUID id = item.getID(); UUID id = item.getID();
itemService.delete(context, item); itemService.delete(context, item);

View File

@@ -7,14 +7,18 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.AbstractIntegrationTestWithDatabase;
@@ -24,6 +28,7 @@ import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.WorkflowItemBuilder; import org.dspace.builder.WorkflowItemBuilder;
@@ -39,6 +44,8 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService; import org.dspace.content.service.WorkspaceItemService;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableClaimedTask;
import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.IndexableItem;
@@ -731,6 +738,64 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
} }
} }
/**
* Test designed to check if default sort option for Discovery is working, using <code>workspace</code>
* DiscoveryConfiguration <br/>
* <b>Note</b>: this test will be skipped if <code>workspace</code> do not have a default sort option set and of
* metadataType <code>dc_date_accessioned</code> or <code>lastModified</code>
* @throws SearchServiceException
*/
@Test
public void searchWithDefaultSortServiceTest() throws SearchServiceException {
DiscoveryConfiguration workspaceConf =
SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null);
// Skip if no default sort option set for workspaceConf
if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) {
return;
}
DiscoverySortFieldConfiguration defaultSortField =
workspaceConf.getSearchSortConfiguration().getDefaultSortField();
// Populate the testing objects: create items in eperson's workspace and perform search in it
int numberItems = 10;
context.turnOffAuthorisationSystem();
EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build();
context.setCurrentUser(submitter);
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItems; i++) {
ItemBuilder.createItem(context, collection)
.withTitle("item " + i)
.build();
}
context.restoreAuthSystemState();
// Build query with default parameters (except for workspaceConf)
DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder()
.buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null,
null,null);
DiscoverResult result = searchService.search(context, discoverQuery);
/*
// code example for testing against sort by dc_date_accessioned
LinkedList<String> dc_date_accesioneds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getMetadata())
.map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned"))
.map(m -> m.getValue()).findFirst().orElse("")
)
.collect(Collectors.toCollection(LinkedList::new));
}*/
LinkedList<String> lastModifieds = result.getIndexableObjects().stream()
.map(o -> ((Item) o.getIndexedObject()).getLastModified().toString())
.collect(Collectors.toCollection(LinkedList::new));
assertFalse(lastModifieds.isEmpty());
for (int i = 1; i < lastModifieds.size() - 1; i++) {
assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0);
}
}
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1); assertSearchQuery(resourceType, size, size, 0, -1);
} }

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.repository.BitstreamRestRepository;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* REST controller for handling bulk updates to Bitstream resources.
* <p>
* This controller is responsible for handling requests to the bitstream category, which allows for updating
* multiple bitstream resources in a single operation.
* </p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
@RestController
@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME)
public class BitstreamCategoryRestController {
@Autowired
BitstreamRestRepository bitstreamRestRepository;
/**
* Handles PATCH requests to the bitstream category for bulk updates of bitstream resources.
*
* @param request the HTTP request object.
* @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied.
* @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a
* HTTP 204 No Content response since currently only a delete operation is supported.
* @throws SQLException if an error occurs while accessing the database.
* @throws AuthorizeException if the user is not authorized to perform the requested operation.
*/
@RequestMapping(method = RequestMethod.PATCH)
public ResponseEntity<RepresentationModel<?>> patch(HttpServletRequest request,
@RequestBody(required = true) JsonNode jsonNode)
throws SQLException, AuthorizeException {
Context context = obtainContext(request);
bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode);
return ResponseEntity.noContent().build();
}
}

View File

@@ -176,7 +176,7 @@ public class OpenSearchController {
if (dsoObject != null) { if (dsoObject != null) {
container = scopeResolver.resolveScope(context, dsoObject); container = scopeResolver.resolveScope(context, dsoObject);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso("site", container); .getDiscoveryConfiguration(context, container);
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray( .toArray(

View File

@@ -7,12 +7,17 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.projection.Projection;
import org.dspace.browse.BrowseIndex; import org.dspace.browse.BrowseIndex;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.sort.SortException; import org.dspace.sort.SortException;
import org.dspace.sort.SortOption; import org.dspace.sort.SortOption;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -30,18 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
public BrowseIndexRest convert(BrowseIndex obj, Projection projection) { public BrowseIndexRest convert(BrowseIndex obj, Projection projection) {
BrowseIndexRest bir = new BrowseIndexRest(); BrowseIndexRest bir = new BrowseIndexRest();
bir.setProjection(projection); bir.setProjection(projection);
bir.setId(obj.getName());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setMetadataBrowse(obj.isMetadataIndex());
List<String> metadataList = new ArrayList<String>(); List<String> metadataList = new ArrayList<String>();
if (obj.isMetadataIndex()) { String id = obj.getName();
if (obj instanceof DSpaceControlledVocabularyIndex) {
DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj;
metadataList = new ArrayList<>(vocObj.getMetadataFields());
id = vocObj.getVocabulary().getPluginInstanceName();
bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName());
bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName());
bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL);
} else if (obj.isMetadataIndex()) {
for (String s : obj.getMetadata().split(",")) { for (String s : obj.getMetadata().split(",")) {
metadataList.add(s.trim()); metadataList.add(s.trim());
} }
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_VALUE_LIST);
} else { } else {
metadataList.add(obj.getSortOption().getMetadata()); metadataList.add(obj.getSortOption().getMetadata());
bir.setDataType(obj.getDataType());
bir.setOrder(obj.getDefaultOrder());
bir.setBrowseType(BROWSE_TYPE_FLAT);
} }
bir.setId(id);
bir.setMetadataList(metadataList); bir.setMetadataList(metadataList);
List<BrowseIndexRest.SortOption> sortOptionsList = new ArrayList<BrowseIndexRest.SortOption>(); List<BrowseIndexRest.SortOption> sortOptionsList = new ArrayList<BrowseIndexRest.SortOption>();
@@ -52,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverter<BrowseIndex, Browse
} catch (SortException e) { } catch (SortException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
bir.setSortOptions(sortOptionsList); if (!bir.getBrowseType().equals(BROWSE_TYPE_HIERARCHICAL)) {
bir.setSortOptions(sortOptionsList);
}
return bir; return bir;
} }

View File

@@ -80,6 +80,15 @@ public class DiscoverConfigurationConverter
sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name()); sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name());
searchConfigurationRest.addSortOption(sortOption); searchConfigurationRest.addSortOption(sortOption);
} }
DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField();
if (defaultSortField != null) {
SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption();
sortOption.setName(defaultSortField.getMetadataField());
sortOption.setActualName(defaultSortField.getType());
sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name());
searchConfigurationRest.setDefaultSortOption(sortOption);
}
} }
} }

View File

@@ -174,6 +174,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
GroupNameNotProvidedException.class, GroupNameNotProvidedException.class,
GroupHasPendingWorkflowTasksException.class, GroupHasPendingWorkflowTasksException.class,
PasswordNotValidException.class, PasswordNotValidException.class,
RESTBitstreamNotFoundException.class
}) })
protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
TranslatableException ex) throws IOException { TranslatableException ex) throws IOException {

View File

@@ -0,0 +1,51 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.exception;
import java.text.MessageFormat;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
/**
* <p>Extend {@link UnprocessableEntityException} to provide a specific error message
* in the REST response. The error message is added to the response in
* {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException},
* hence it should not contain sensitive or security-compromising info.</p>
*
* @author Jens Vannerum (jens.vannerum@atmire.com)
*/
public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException {
public static String uuid;
/**
* @param formatStr string with placeholders, ideally obtained using {@link I18nUtil}
* @return message with bitstream id substituted
*/
private static String formatMessage(String formatStr) {
MessageFormat fmt = new MessageFormat(formatStr);
return fmt.format(new String[]{uuid});
}
public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message";
public RESTBitstreamNotFoundException(String uuid) {
super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY)));
RESTBitstreamNotFoundException.uuid = uuid;
}
public String getMessageKey() {
return MESSAGE_KEY;
}
public String getLocalizedMessage(Context context) {
return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context));
}
}

View File

@@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory<BrowseEntryResourc
UriComponentsBuilder baseLink = uriBuilder( UriComponentsBuilder baseLink = uriBuilder(
getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(), getMethodOn(bix.getCategory(), bix.getType()).findRel(null, null, bix.getCategory(),
English.plural(bix.getType()), bix.getId(), English.plural(bix.getType()), bix.getId(),
BrowseIndexRest.ITEMS, null, null)); BrowseIndexRest.LINK_ITEMS, null, null));
addFilterParams(baseLink, data); addFilterParams(baseLink, data);
list.add(buildLink(BrowseIndexRest.ITEMS, list.add(buildLink(BrowseIndexRest.LINK_ITEMS,
baseLink.build().encode().toUriString())); baseLink.build().encode().toUriString()));
} }
} }

View File

@@ -10,6 +10,7 @@ package org.dspace.app.rest.model;
import java.util.List; import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.RestResourceController;
@@ -20,11 +21,11 @@ import org.dspace.app.rest.RestResourceController;
*/ */
@LinksRest(links = { @LinksRest(links = {
@LinkRest( @LinkRest(
name = BrowseIndexRest.ITEMS, name = BrowseIndexRest.LINK_ITEMS,
method = "listBrowseItems" method = "listBrowseItems"
), ),
@LinkRest( @LinkRest(
name = BrowseIndexRest.ENTRIES, name = BrowseIndexRest.LINK_ENTRIES,
method = "listBrowseEntries" method = "listBrowseEntries"
) )
}) })
@@ -35,20 +36,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
public static final String CATEGORY = RestAddressableModel.DISCOVER; public static final String CATEGORY = RestAddressableModel.DISCOVER;
public static final String ITEMS = "items"; public static final String LINK_ITEMS = "items";
public static final String ENTRIES = "entries"; public static final String LINK_ENTRIES = "entries";
public static final String LINK_VOCABULARY = "vocabulary";
boolean metadataBrowse; // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types,
// etc. the second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_VALUE_LIST = "valueList";
// if the browse index has one level: the full list of items
public static final String BROWSE_TYPE_FLAT = "flatBrowse";
// if the browse index should display the vocabulary tree. The 1st level shows the tree.
// The second level is the actual list of items linked to a specific entry
public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse";
// Shared fields
String browseType;
@JsonProperty(value = "metadata") @JsonProperty(value = "metadata")
List<String> metadataList; List<String> metadataList;
// Single browse index fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String dataType; String dataType;
@JsonInclude(JsonInclude.Include.NON_NULL)
List<SortOption> sortOptions; List<SortOption> sortOptions;
@JsonInclude(JsonInclude.Include.NON_NULL)
String order; String order;
// Hierarchical browse fields
@JsonInclude(JsonInclude.Include.NON_NULL)
String facetType;
@JsonInclude(JsonInclude.Include.NON_NULL)
String vocabulary;
@JsonIgnore @JsonIgnore
@Override @Override
public String getCategory() { public String getCategory() {
@@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
return NAME; return NAME;
} }
public boolean isMetadataBrowse() {
return metadataBrowse;
}
public void setMetadataBrowse(boolean metadataBrowse) {
this.metadataBrowse = metadataBrowse;
}
public List<String> getMetadataList() { public List<String> getMetadataList() {
return metadataList; return metadataList;
} }
@@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest<String> {
this.sortOptions = sortOptions; this.sortOptions = sortOptions;
} }
/**
* - valueList => if the browse index has two levels, the 1st level shows the list of entries like author names,
* subjects, types, etc. the second level is the actual list of items linked to a specific entry
* - flatBrowse if the browse index has one level: the full list of items
* - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree.
* The second level is the actual list of items linked to a specific entry
*/
public void setBrowseType(String browseType) {
this.browseType = browseType;
}
public String getBrowseType() {
return browseType;
}
public void setFacetType(String facetType) {
this.facetType = facetType;
}
public String getFacetType() {
return facetType;
}
public void setVocabulary(String vocabulary) {
this.vocabulary = vocabulary;
}
public String getVocabulary() {
return vocabulary;
}
@Override @Override
public Class getController() { public Class getController() {
return RestResourceController.class; return RestResourceController.class;

View File

@@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
private List<Filter> filters = new LinkedList<>(); private List<Filter> filters = new LinkedList<>();
private List<SortOption> sortOptions = new LinkedList<>(); private List<SortOption> sortOptions = new LinkedList<>();
private SortOption defaultSortOption;
public String getCategory() { public String getCategory() {
return CATEGORY; return CATEGORY;
} }
@@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest<String> {
return sortOptions; return sortOptions;
} }
public SortOption getDefaultSortOption() {
return defaultSortOption;
}
public void setDefaultSortOption(SortOption defaultSortOption) {
this.defaultSortOption = defaultSortOption;
}
@Override @Override
public boolean equals(Object object) { public boolean equals(Object object) {
return (object instanceof SearchConfigurationRest && return (object instanceof SearchConfigurationRest &&

View File

@@ -7,9 +7,20 @@
*/ */
package org.dspace.app.rest.model.hateoas; package org.dspace.app.rest.model.hateoas;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import org.atteo.evo.inflector.English;
import org.dspace.app.rest.RestResourceController;
import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.app.rest.model.VocabularyRest;
import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource;
import org.dspace.app.rest.utils.Utils; import org.dspace.app.rest.utils.Utils;
import org.dspace.content.authority.ChoiceAuthority;
import org.dspace.content.authority.factory.ContentAuthorityServiceFactory;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.springframework.hateoas.Link;
import org.springframework.web.util.UriComponentsBuilder;
/** /**
* Browse Index Rest HAL Resource. The HAL Resource wraps the REST Resource * Browse Index Rest HAL Resource. The HAL Resource wraps the REST Resource
@@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils;
*/ */
@RelNameDSpaceResource(BrowseIndexRest.NAME) @RelNameDSpaceResource(BrowseIndexRest.NAME)
public class BrowseIndexResource extends DSpaceResource<BrowseIndexRest> { public class BrowseIndexResource extends DSpaceResource<BrowseIndexRest> {
public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { public BrowseIndexResource(BrowseIndexRest bix, Utils utils) {
super(bix, utils); super(bix, utils);
// TODO: the following code will force the embedding of items and // TODO: the following code will force the embedding of items and
// entries in the browseIndex we need to find a way to populate the rels // entries in the browseIndex we need to find a way to populate the rels
// array from the request/projection right now it is always null // array from the request/projection right now it is always null
// super(bix, utils, "items", "entries"); // super(bix, utils, "items", "entries");
if (bix.isMetadataBrowse()) { if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES));
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) {
add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS));
}
if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) {
ChoiceAuthorityService choiceAuthorityService =
ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService();
ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary());
UriComponentsBuilder baseLink = linkTo(
methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null,
null, VocabularyRest.CATEGORY,
English.plural(VocabularyRest.NAME), source.getPluginInstanceName(),
"", null, null)).toUriComponentsBuilder();
add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY));
} }
add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS));
} }
} }

View File

@@ -15,9 +15,12 @@ import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter; import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.JsonPatchConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
@@ -38,6 +41,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService; import org.dspace.content.service.CommunityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.handle.service.HandleService; import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
@@ -72,6 +76,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
@Autowired @Autowired
private HandleService handleService; private HandleService handleService;
@Autowired
ConfigurationService configurationService;
@Autowired @Autowired
public BitstreamRestRepository(BitstreamService dsoService) { public BitstreamRestRepository(BitstreamService dsoService) {
super(dsoService); super(dsoService);
@@ -248,4 +255,25 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstrea
return converter.toRest(targetBundle, utils.obtainProjection()); return converter.toRest(targetBundle, utils.obtainProjection());
} }
/**
* Method that will transform the provided PATCH json body into a list of operations.
* The operations will be handled by a supporting class resolved by the
* {@link org.dspace.app.rest.repository.patch.ResourcePatch#patch} method.
*
* @param context The context
* @param jsonNode the json body provided from the request body
*/
public void patchBitstreamsInBulk(Context context, JsonNode jsonNode) throws SQLException {
int operationsLimit = configurationService.getIntProperty("rest.patch.operations.limit", 1000);
ObjectMapper mapper = new ObjectMapper();
JsonPatchConverter patchConverter = new JsonPatchConverter(mapper);
Patch patch = patchConverter.convert(jsonNode);
if (patch.getOperations().size() > operationsLimit) {
throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " +
operationsLimit);
}
resourcePatch.patch(obtainContext(), null, patch.getOperations());
context.commit();
}
} }

View File

@@ -40,7 +40,7 @@ import org.springframework.stereotype.Component;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ENTRIES)
public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository { implements LinkRestRepository {
@@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository
@Override @Override
public boolean isEmbeddableRelation(Object data, String name) { public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data; BrowseIndexRest bir = (BrowseIndexRest) data;
if (bir.isMetadataBrowse() && "entries".equals(name)) { if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) &&
name.equals(BrowseIndexRest.LINK_ENTRIES)) {
return true; return true;
} }
return false; return false;

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest.repository; package org.dspace.app.rest.repository;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@@ -17,7 +18,10 @@ import org.dspace.app.rest.model.BrowseIndexRest;
import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseException;
import org.dspace.browse.BrowseIndex; import org.dspace.browse.BrowseIndex;
import org.dspace.browse.CrossLinks; import org.dspace.browse.CrossLinks;
import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
import org.dspace.content.authority.service.ChoiceAuthorityService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
@@ -31,26 +35,48 @@ import org.springframework.stereotype.Component;
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME)
public class BrowseIndexRestRepository extends DSpaceRestRepository<BrowseIndexRest, String> { public class BrowseIndexRestRepository extends DSpaceRestRepository<BrowseIndexRest, String> {
@Autowired
private ChoiceAuthorityService choiceAuthorityService;
@Override @Override
@PreAuthorize("permitAll()") @PreAuthorize("permitAll()")
public BrowseIndexRest findOne(Context context, String name) { public BrowseIndexRest findOne(Context context, String name) {
BrowseIndexRest bi = null; BrowseIndexRest bi = createFromMatchingBrowseIndex(name);
if (bi == null) {
bi = createFromMatchingVocabulary(name);
}
return bi;
}
private BrowseIndexRest createFromMatchingVocabulary(String name) {
DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name);
if (vocabularyIndex != null) {
return converter.toRest(vocabularyIndex, utils.obtainProjection());
}
return null;
}
private BrowseIndexRest createFromMatchingBrowseIndex(String name) {
BrowseIndex bix; BrowseIndex bix;
try { try {
bix = BrowseIndex.getBrowseIndex(name); bix = BrowseIndex.getBrowseIndex(name);
} catch (BrowseException e) { } catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
if (bix != null) { if (bix != null) {
bi = converter.toRest(bix, utils.obtainProjection()); return converter.toRest(bix, utils.obtainProjection());
} }
return bi; return null;
} }
@Override @Override
public Page<BrowseIndexRest> findAll(Context context, Pageable pageable) { public Page<BrowseIndexRest> findAll(Context context, Pageable pageable) {
try { try {
List<BrowseIndex> indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); List<BrowseIndex> indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices()));
choiceAuthorityService.getChoiceAuthoritiesNames()
.stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null)
.forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name)));
return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection());
} catch (BrowseException e) { } catch (BrowseException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);

View File

@@ -42,7 +42,7 @@ import org.springframework.stereotype.Component;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ITEMS) @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS)
public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository { implements LinkRestRepository {
@@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository
@Override @Override
public boolean isEmbeddableRelation(Object data, String name) { public boolean isEmbeddableRelation(Object data, String name) {
BrowseIndexRest bir = (BrowseIndexRest) data; BrowseIndexRest bir = (BrowseIndexRest) data;
if (!bir.isMetadataBrowse() && "items".equals(name)) { if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) &&
name.equals(BrowseIndexRest.LINK_ITEMS)) {
return true; return true;
} }
return false; return false;

View File

@@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection());
} }
@@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Context context = obtainContext(); Context context = obtainContext();
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null; DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null; DiscoverQuery discoverQuery = null;
@@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration);
} }
@@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix,
query, searchFilters, dsoTypes, page, facetName); query, searchFilters, dsoTypes, page, facetName);
@@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository {
Pageable page = PageRequest.of(1, 1); Pageable page = PageRequest.of(1, 1);
IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject);
DiscoverResult searchResult = null; DiscoverResult searchResult = null;
DiscoverQuery discoverQuery = null; DiscoverQuery discoverQuery = null;

View File

@@ -0,0 +1,79 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository.patch.operation;
import java.io.IOException;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.rest.exception.RESTBitstreamNotFoundException;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.stereotype.Component;
/**
 * A PATCH operation for removing bitstreams in bulk from the repository.
 *
 * Example: <code>
 * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json"
 * -d '[
 *   {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"},
 *   {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"},
 *   {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"}
 * ]'
 * </code>
 *
 * @author Jens Vannerum (jens.vannerum@atmire.com)
 */
@Component
public class BitstreamRemoveOperation extends PatchOperation<Bitstream> {
    @Autowired
    BitstreamService bitstreamService;
    @Autowired
    AuthorizeService authorizeService;
    public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/";

    /**
     * Deletes the bitstream whose UUID is encoded in the operation path.
     *
     * @param context   the DSpace context
     * @param resource  unused; bulk removal is not bound to a single parent resource (always null)
     * @param operation the "remove" patch operation carrying a /bitstreams/{uuid} path
     * @return always null, since the targeted bitstream no longer exists after deletion
     * @throws SQLException passed through from the bitstream lookup
     */
    @Override
    public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException {
        String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, "");
        // Parse the UUID defensively: a malformed UUID in the path is reported the same
        // way as an unknown one, instead of escaping as an unhandled
        // IllegalArgumentException (HTTP 500).
        UUID bitstreamUUID;
        try {
            bitstreamUUID = UUID.fromString(bitstreamIDtoDelete);
        } catch (IllegalArgumentException e) {
            throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete);
        }
        Bitstream bitstreamToDelete = bitstreamService.find(context, bitstreamUUID);
        if (bitstreamToDelete == null) {
            throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete);
        }
        authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE);

        try {
            bitstreamService.delete(context, bitstreamToDelete);
        } catch (AuthorizeException | IOException e) {
            // Authorization was already verified above; any remaining failure is
            // unexpected, so rethrow unchecked while preserving the original cause.
            throw new RuntimeException(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Matches only bulk bitstream removal: no bound resource, op equals "remove"
     * (case-insensitive) and a path starting with {@link #OPERATION_PATH_BITSTREAM_REMOVE}.
     */
    @Override
    public boolean supports(Object objectToMatch, Operation operation) {
        return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) &&
            operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE);
    }

    /**
     * Checks the given action on the bitstream, translating an {@link AuthorizeException}
     * into Spring Security's {@link AccessDeniedException} so the REST layer answers 403.
     *
     * @param context   the DSpace context
     * @param bitstream the bitstream the action is checked against
     * @param operation the action id from {@link Constants}, e.g. {@link Constants#DELETE}
     * @throws SQLException passed through from the authorization check
     */
    public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation)
        throws SQLException {
        try {
            authorizeService.authorizeAction(context, bitstream, operation);
        } catch (AuthorizeException e) {
            throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e);
        }
    }
}

View File

@@ -14,6 +14,7 @@ import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
@@ -82,6 +83,7 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter; private DSpaceRunnableParameterConverter dSpaceRunnableParameterConverter;
private Collection collection; private Collection collection;
private Path workDir; private Path workDir;
private static final String TEMP_DIR = ItemImport.TEMP_DIR;
@Before @Before
@Override @Override
@@ -126,6 +128,10 @@ public class ItemImportIT extends AbstractEntityIntegrationTest {
checkMetadata(); checkMetadata();
checkMetadataWithAnotherSchema(); checkMetadataWithAnotherSchema();
checkBitstream(); checkBitstream();
// confirm that TEMP_DIR still exists
File workTempDir = new File(workDir + File.separator + TEMP_DIR);
assertTrue(workTempDir.exists());
} }
@Test @Test

View File

@@ -7,12 +7,16 @@
*/ */
package org.dspace.app.rest; package org.dspace.app.rest;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_OK;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE;
import static org.dspace.core.Constants.WRITE; import static org.dspace.core.Constants.WRITE;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
@@ -21,9 +25,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.ws.rs.core.MediaType;
import org.apache.commons.codec.CharEncoding; import org.apache.commons.codec.CharEncoding;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
@@ -33,6 +39,7 @@ import org.dspace.app.rest.matcher.BundleMatcher;
import org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.matcher.HalMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.RemoveOperation;
import org.dspace.app.rest.model.patch.ReplaceOperation; import org.dspace.app.rest.model.patch.ReplaceOperation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.app.rest.test.MetadataPatchSuite;
@@ -41,6 +48,7 @@ import org.dspace.builder.BitstreamBuilder;
import org.dspace.builder.BundleBuilder; import org.dspace.builder.BundleBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder; import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EPersonBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.builder.ResourcePolicyBuilder;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
@@ -52,15 +60,20 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataSchemaEnum; import org.dspace.content.MetadataSchemaEnum;
import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.MvcResult;
public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest {
@@ -79,6 +92,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
@Autowired @Autowired
private ItemService itemService; private ItemService itemService;
@Autowired
CollectionService collectionService;
@Autowired
CommunityService communityService;
@Test @Test
public void findAllTest() throws Exception { public void findAllTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below //We turn off the authorization system in order to create the structure as defined below
@@ -2370,6 +2389,513 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
)); ));
} }
/**
 * Happy path: an admin submits one PATCH with three "remove" operations; the request
 * answers HTTP 204, exactly those three bitstreams are gone and the fourth survives.
 */
@Test
public void deleteBitstreamsInBulk() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: one collection holding two items, each with two bitstreams
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection")
        .build();
    Item publicItem1 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(admin.getEmail(), password);

    // Sanity check: all four bitstreams are retrievable before the bulk delete
    Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));

    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isNoContent());

    // Verify that only the three bitstreams were deleted and the fourth one still exists
    Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3));
    Assert.assertTrue(bitstreamExists(token, bitstream4));
}
/**
 * When one of the remove operations references a UUID that does not exist in the
 * repository, the whole PATCH is rejected (HTTP 422) with a descriptive error message
 * and none of the listed bitstreams are deleted (all-or-nothing semantics).
 */
@Test
public void deleteBitstreamsInBulk_invalidUUID() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: one collection holding two items, each with two bitstreams
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection")
        .build();
    Item publicItem1 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    // For the third bitstream, use an invalid UUID
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    UUID randomUUID = UUID.randomUUID();
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID);
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(admin.getEmail(), password);

    // Sanity check: all four bitstreams are retrievable before the request
    Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));

    MvcResult result = getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isUnprocessableEntity())
        .andReturn();

    // Verify our custom error message is returned when an invalid UUID is used
    assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository",
        result.getResponse().getErrorMessage());

    // Verify that no bitstreams were deleted since the request was invalid
    Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
}
/**
 * When the number of patch operations exceeds the configured
 * "rest.patch.operations.limit", the whole request is rejected with HTTP 400
 * and no bitstream is deleted.
 */
@Test
public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: one collection holding two items, each with two bitstreams
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection")
        .build();
    Item publicItem1 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    // But set the rest.patch.operations.limit property to 2, so that the request is invalid
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(admin.getEmail(), password);

    // Sanity check: all four bitstreams are retrievable before the request
    Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));

    // Remember the configured limit and restore it afterwards, so this test cannot
    // leak a lowered operations limit into other tests running in the same JVM.
    String originalLimit = DSpaceServicesFactory.getInstance().getConfigurationService()
        .getProperty("rest.patch.operations.limit");
    DSpaceServicesFactory.getInstance().getConfigurationService()
        .setProperty("rest.patch.operations.limit", 2);
    try {
        getClient(token).perform(patch("/api/core/bitstreams")
            .content(patchBody)
            .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
            .andExpect(status().isBadRequest());

        // Verify that no bitstreams were deleted since the request was invalid
        Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));
    } finally {
        DSpaceServicesFactory.getInstance().getConfigurationService()
            .setProperty("rest.patch.operations.limit", originalLimit);
    }
}
/**
 * A bulk-delete PATCH without an authentication token is rejected with HTTP 401.
 */
@Test
public void deleteBitstreamsInBulk_Unauthorized() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: one collection holding two items, each with two bitstreams
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection")
        .build();
    Item publicItem1 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(admin.getEmail(), password);

    // Sanity check (with an admin token): all four bitstreams exist
    Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4));

    // The PATCH itself is sent anonymously (no token) and must be rejected
    getClient().perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isUnauthorized());
}
/**
 * A bulk-delete PATCH from an authenticated but unprivileged eperson is rejected
 * with HTTP 403.
 */
@Test
public void deleteBitstreamsInBulk_Forbidden() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: one collection holding two items with three bitstreams in total
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection collection = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection")
        .build();
    Item publicItem1 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, collection)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add all three bitstreams to the list of bitstreams to be deleted
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);

    // A plain eperson has no DELETE privilege on any of the bitstreams
    String token = getAuthToken(eperson.getEmail(), password);
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isForbidden());
}
/**
 * Collection-admin scoping: a bulk delete succeeds (HTTP 204) only when every listed
 * bitstream belongs to a collection the requester administers; one out-of-scope
 * bitstream makes the whole request fail with HTTP 403. Exercised for the admins of
 * two sibling collections.
 */
@Test
public void deleteBitstreamsInBulk_collectionAdmin() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: two collections, each with its own admin and one item;
    // bitstreams 1+2 live in collection 1, bitstreams 3+4 in collection 2
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection 1")
        .build();
    Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection 2")
        .build();
    EPerson col1Admin = EPersonBuilder.createEPerson(context)
        .withEmail("col1admin@test.com")
        .withPassword(password)
        .build();
    EPerson col2Admin = EPersonBuilder.createEPerson(context)
        .withEmail("col2admin@test.com")
        .withPassword(password)
        .build();
    Group col1_AdminGroup = collectionService.createAdministrators(context, col1);
    Group col2_AdminGroup = collectionService.createAdministrators(context, col2);
    groupService.addMember(context, col1_AdminGroup, col1Admin);
    groupService.addMember(context, col2_AdminGroup, col2Admin);
    Item publicItem1 = ItemBuilder.createItem(context, col1)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, col2)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(col1Admin.getEmail(), password);

    // Should return forbidden since one of the bitstreams (bitstream3) does not
    // originate from collection 1
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isForbidden());

    // Remove the bitstream that does not originate from the collection we are administrator of,
    // should return OK
    ops.remove(2);
    patchBody = getPatchContent(ops);
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isNoContent());

    // Change the token to the admin of collection 2
    token = getAuthToken(col2Admin.getEmail(), password);

    // Rebuild the operation list for bitstreams 2, 3 and 4
    // (bitstream2 was already deleted above, but the authorization check fails first)
    ops = new ArrayList<>();
    removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp1);
    removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp2);
    removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID());
    ops.add(removeOp3);
    patchBody = getPatchContent(ops);

    // Should return forbidden since one of the bitstreams (bitstream2) does not
    // originate from collection 2
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isForbidden());

    // Remove the bitstream that does not originate from the collection we are administrator of,
    // should return OK
    ops.remove(0);
    patchBody = getPatchContent(ops);
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isNoContent());
}
/**
 * Community-admin scoping: bitstreams spread over two collections of the same
 * community may all be deleted in one request (HTTP 204) by an admin of that
 * parent community.
 */
@Test
public void deleteBitstreamsInBulk_communityAdmin() throws Exception {
    context.turnOffAuthorisationSystem();
    // Fixture: two collections under one community; one item per collection,
    // two bitstreams per item; one admin on the parent community
    parentCommunity = CommunityBuilder.createCommunity(context)
        .withName("Parent Community")
        .build();
    Collection col1 = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection 1")
        .build();
    Collection col2 = CollectionBuilder.createCollection(context, parentCommunity)
        .withName("Collection 2")
        .build();
    EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context)
        .withEmail("parentComAdmin@test.com")
        .withPassword(password)
        .build();
    Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity);
    groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin);
    Item publicItem1 = ItemBuilder.createItem(context, col1)
        .withTitle("Test item 1")
        .build();
    Item publicItem2 = ItemBuilder.createItem(context, col2)
        .withTitle("Test item 2")
        .build();
    String bitstreamContent = "This is an archived bitstream";
    Bitstream bitstream1 = null;
    Bitstream bitstream2 = null;
    Bitstream bitstream3 = null;
    Bitstream bitstream4 = null;
    try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) {
        bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 1")
            .withMimeType("text/plain")
            .build();
        bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is)
            .withName("Bitstream 2")
            .withMimeType("text/plain")
            .build();
        bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 3")
            .withMimeType("text/plain")
            .build();
        bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is)
            .withName("Bitstream 4")
            .withMimeType("text/plain")
            .build();
    }
    context.restoreAuthSystemState();

    // Add three out of four bitstreams to the list of bitstreams to be deleted
    List<Operation> ops = new ArrayList<>();
    RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID());
    ops.add(removeOp1);
    RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID());
    ops.add(removeOp2);
    RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID());
    ops.add(removeOp3);
    String patchBody = getPatchContent(ops);
    String token = getAuthToken(parentCommunityAdmin.getEmail(), password);

    // Bitstreams originate from two different collections, but those collections live in the
    // same community, so a community admin should be able to delete them
    getClient(token).perform(patch("/api/core/bitstreams")
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
        .andExpect(status().isNoContent());
}
/**
 * Check that every given bitstream is still retrievable over REST.
 *
 * @param token      authentication token used for the GET requests
 * @param bitstreams the bitstreams whose REST resources are probed
 * @return true when each lookup answers HTTP 200, false on the first miss
 */
public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception {
    for (Bitstream current : bitstreams) {
        int status = getClient(token).perform(get("/api/core/bitstreams/" + current.getID()))
                                     .andReturn().getResponse().getStatus();
        if (status != SC_OK) {
            return false;
        }
    }
    return true;
}
/**
 * Check that every given bitstream is gone from the REST API.
 *
 * @param token      authentication token used for the GET requests
 * @param bitstreams the bitstreams to look up
 * @return true when each GET on /api/core/bitstreams/{uuid} answers 404 Not Found, false as soon as one does not
 * @throws Exception if a request cannot be performed
 */
public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception {
    for (Bitstream bitstream : bitstreams) {
        int status = getClient(token)
            .perform(get("/api/core/bitstreams/" + bitstream.getID()))
            .andReturn()
            .getResponse()
            .getStatus();
        if (status != SC_NOT_FOUND) {
            return false;
        }
    }
    return true;
}
} }

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest; package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
@@ -63,22 +64,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
//We expect the content type to be "application/hal+json;charset=UTF-8" //We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType)) .andExpect(content().contentType(contentType))
//Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page
// object // object
.andExpect(jsonPath("$.page.size", is(20))) .andExpect(jsonPath("$.page.size", is(20)))
.andExpect(jsonPath("$.page.totalElements", is(4))) .andExpect(jsonPath("$.page.totalElements", is(5)))
.andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.totalPages", is(1)))
.andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$.page.number", is(0)))
//The array of browse index should have a size 4 //The array of browse index should have a size 5
.andExpect(jsonPath("$._embedded.browses", hasSize(4))) .andExpect(jsonPath("$._embedded.browses", hasSize(5)))
//Check that all (and only) the default browse indexes are present //Check that all (and only) the default browse indexes are present
.andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder(
BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.dateIssuedBrowseIndex("asc"),
BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"),
BrowseIndexMatcher.titleBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"),
BrowseIndexMatcher.subjectBrowseIndex("asc") BrowseIndexMatcher.subjectBrowseIndex("asc"),
BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")
))) )))
; ;
} }
@@ -125,6 +127,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
; ;
} }
@Test
public void findBrowseByVocabulary() throws Exception {
//Use srsc as this vocabulary is included by default
//When we call the root endpoint
getClient().perform(get("/api/discover/browses/srsc"))
//The status has to be 200 OK
.andExpect(status().isOk())
//We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType))
//Check that the JSON root matches the expected browse index
.andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc")))
;
}
@Test @Test
public void findBrowseBySubject() throws Exception { public void findBrowseBySubject() throws Exception {
//When we call the root endpoint //When we call the root endpoint
@@ -2142,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author" // The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author"))) .andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse // It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true))) .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)))
; ;
} }
@@ -2159,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
// The browse definition ID should be "author" // The browse definition ID should be "author"
.andExpect(jsonPath("$.id", is("author"))) .andExpect(jsonPath("$.id", is("author")))
// It should be configured as a metadata browse // It should be configured as a metadata browse
.andExpect(jsonPath("$.metadataBrowse", is(true))); .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST)));
} }
@Test @Test

View File

@@ -153,6 +153,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums.add(title); metadatums.add(title);
metadatums.add(author); metadatums.add(author);
@@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums.add(issn); metadatums.add(issn);
metadatums.add(volume); metadatums.add(volume);
metadatums.add(issue); metadatums.add(issue);
metadatums.add(publisher);
ImportRecord firstrRecord = new ImportRecord(metadatums); ImportRecord firstrRecord = new ImportRecord(metadatums);
@@ -179,6 +182,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060");
MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1");
MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2");
MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null,
"Petro Mohyla Black Sea National University");
metadatums2.add(title2); metadatums2.add(title2);
metadatums2.add(author2); metadatums2.add(author2);
@@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
metadatums2.add(issn2); metadatums2.add(issn2);
metadatums2.add(volume2); metadatums2.add(volume2);
metadatums2.add(issue2); metadatums2.add(issue2);
metadatums2.add(publisher2);
ImportRecord secondRecord = new ImportRecord(metadatums2); ImportRecord secondRecord = new ImportRecord(metadatums2);
records.add(firstrRecord); records.add(firstrRecord);
@@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt
return records; return records;
} }
} }

View File

@@ -0,0 +1,677 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.dspace.app.rest.matcher.FacetEntryMatcher;
import org.dspace.app.rest.matcher.FacetValueMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.service.CollectionService;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This class tests the correct inheritance of Discovery configurations for sub communities and collections.
* To thoroughly test this, a community and collection structure is set up to where different communities have custom
* configurations configured for them.
*
* The following structure is uses:
* - Parent Community 1 - Custom configuration: discovery-parent-community-1
* -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1
* -- Collection 111 - Custom configuration: discovery-collection-1-1-1
* -- Collection 112
* -- Subcommunity 12
* -- Collection 121 - Custom configuration: discovery-collection-1-2-1
* -- Collection 122
* - Parent Community 2
* -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1
* -- Collection 211 - Custom configuration: discovery-collection-2-1-1
* -- Collection 212
* -- Subcommunity 22
* -- Collection 221 - Custom configuration: discovery-collection-2-2-1
* -- Collection 222
*
* Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is
* indexed and provided for the different search scopes.
*
* Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222,
* and one in collection 122 and 211.
*
* The tests will verify that for each object, the correct facets are provided and that all the necessary fields to
* power these facets are indexed properly.
*
* This file requires the discovery configuration in the following test file:
* src/test/data/dspaceFolder/config/spring/api/test-discovery.xml
*/
public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest {
@Autowired
CollectionService collectionService;

// Community/collection tree under test. See the class Javadoc for which custom
// discovery configuration (if any) is attached to each handle; handles are
// assigned explicitly in setUp() so the test-discovery.xml configurations match.
private Community parentCommunity1;
private Community subcommunity11;
private Community subcommunity12;
private Collection collection111;
private Collection collection112;
private Collection collection121;
private Collection collection122;
private Community parentCommunity2;
private Community subcommunity21;
private Community subcommunity22;
private Collection collection211;
private Collection collection212;
private Collection collection221;
private Collection collection222;
@Before
public void setUp() throws Exception {
    super.setUp();

    context.turnOffAuthorisationSystem();

    // Register the seven metadata fields used by the custom discovery configurations under test.
    MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build();
    MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build();

    // Build the community/collection tree described in the class Javadoc; the explicit
    // handles tie each object to its discovery configuration in test-discovery.xml.
    parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1")
                                       .build();
    subcommunity11 = CommunityBuilder
        .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1")
        .build();
    subcommunity12 = CommunityBuilder
        .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2")
        .build();
    collection111 = CollectionBuilder
        .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1")
        .build();
    collection112 = CollectionBuilder
        .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2")
        .build();
    collection121 = CollectionBuilder
        .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1")
        .build();
    collection122 = CollectionBuilder
        .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2")
        .build();

    parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2")
                                       .build();
    subcommunity21 = CommunityBuilder
        .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-1")
        .build();
    subcommunity22 = CommunityBuilder
        .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2")
        .build();
    collection211 = CollectionBuilder
        .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1")
        .build();
    collection212 = CollectionBuilder
        .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2")
        .build();
    collection221 = CollectionBuilder
        .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1")
        .build();
    collection222 = CollectionBuilder
        .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2")
        .build();

    // One item per collection. Every item carries a value for all seven test fields, so a
    // facet value only shows up where the matching discovery configuration is in scope.
    createItemWithTestMetadata(collection111, "item111");
    createItemWithTestMetadata(collection112, "item112");
    createItemWithTestMetadata(collection121, "item121");
    createItemWithTestMetadata(collection122, "item122");
    createItemWithTestMetadata(collection211, "item211");
    createItemWithTestMetadata(collection212, "item212");
    createItemWithTestMetadata(collection221, "item221");
    createItemWithTestMetadata(collection222, "item222");

    // Mapped items: each is owned by the first collection and additionally mapped into the second.
    Item mappedItem111222 = createItemWithTestMetadata(collection111, "mappedItem111222");
    Item mappedItem122211 = createItemWithTestMetadata(collection122, "mappedItem122211");
    collectionService.addItem(context, collection222, mappedItem111222);
    collectionService.addItem(context, collection211, mappedItem122211);

    context.dispatchEvents();
    context.restoreAuthSystemState();
}

/**
 * Create an item in the given collection with an author and a value for each of the seven
 * test metadata fields. Every value is suffixed with the item's label ("{field}-{label}")
 * so facet values are unique per item.
 *
 * @param collection the owning collection of the new item
 * @param label      unique label appended to every metadata value
 * @return the built item
 */
private Item createItemWithTestMetadata(Collection collection, String label) throws Exception {
    return ItemBuilder.createItem(context, collection)
                      .withMetadata("dc", "contributor", "author", "author-" + label)
                      .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-" + label)
                      .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-" + label)
                      .withMetadata("dc", "test", "collection111field", "collection111field-" + label)
                      .withMetadata("dc", "test", "collection121field", "collection121field-" + label)
                      .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-" + label)
                      .withMetadata("dc", "test", "collection211field", "collection211field-" + label)
                      .withMetadata("dc", "test", "collection221field", "collection221field-" + label)
                      .build();
}
/**
 * Verify that the custom configuration "discovery-parent-community-1" is correctly used for
 * Parent Community 1: its unique facet is exposed and indexes the items of the whole subtree.
 */
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception {
    String scope = String.valueOf(parentCommunity1.getID());

    // The facet list for this scope contains the author facet plus the scope-specific field.
    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))));

    // All items anywhere under Parent Community 1 (including mapped items) are indexed.
    getClient().perform(get("/api/discover/facets/parentcommunity1field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item111", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item112", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item121", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item122", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-mappedItem111222", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-mappedItem122211", 1)
               )));
}
/**
 * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for
 * Subcommunity 11: its unique facet covers the items of collections 111 and 112.
 */
@Test
public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception {
    String scope = String.valueOf(subcommunity11.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))));

    getClient().perform(get("/api/discover/facets/subcommunity11field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("subcommunity11field", "subcommunity11field-item111", 1),
                   FacetValueMatcher.matchEntry("subcommunity11field", "subcommunity11field-item112", 1),
                   FacetValueMatcher.matchEntry("subcommunity11field", "subcommunity11field-mappedItem111222", 1)
               )));
}
/**
 * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for
 * Collection 111: its unique facet covers the owned item plus the item mapped into it.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception {
    String scope = String.valueOf(collection111.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "collection111field", "text"))));

    getClient().perform(get("/api/discover/facets/collection111field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("collection111field", "collection111field-item111", 1),
                   FacetValueMatcher.matchEntry("collection111field", "collection111field-mappedItem111222", 1)
               )));
}
/**
 * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1"
 * is inherited correctly for Collection 112, which has no configuration of its own.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception {
    String scope = String.valueOf(collection112.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))));

    getClient().perform(get("/api/discover/facets/subcommunity11field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("subcommunity11field", "subcommunity11field-item112", 1)
               )));
}
/**
 * Verify that the first encountered custom parent configuration "discovery-parent-community-1"
 * is inherited correctly for Subcommunity 12, which has no configuration of its own.
 */
@Test
public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception {
    String scope = String.valueOf(subcommunity12.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))));

    getClient().perform(get("/api/discover/facets/parentcommunity1field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item121", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item122", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-mappedItem122211", 1)
               )));
}
/**
 * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for
 * Collection 121.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception {
    String scope = String.valueOf(collection121.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "collection121field", "text"))));

    getClient().perform(get("/api/discover/facets/collection121field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("collection121field", "collection121field-item121", 1)
               )));
}
/**
 * Verify that the first encountered custom parent configuration "discovery-parent-community-1"
 * is inherited correctly for Collection 122, which has no configuration of its own.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception {
    String scope = String.valueOf(collection122.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))));

    getClient().perform(get("/api/discover/facets/parentcommunity1field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-item122", 1),
                   FacetValueMatcher.matchEntry("parentcommunity1field", "parentcommunity1field-mappedItem122211", 1)
               )));
}
/**
 * Verify that the default configuration is used when no custom configuration exists anywhere
 * up the tree, as is the case for Parent Community 2.
 */
@Test
public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception {
    String scope = String.valueOf(parentCommunity2.getID());

    // Only the default facet set is exposed for this scope.
    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.subjectFacet(false),
                   FacetEntryMatcher.dateIssuedFacet(false),
                   FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
                   FacetEntryMatcher.entityTypeFacet(false)
               )));
}
/**
 * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for
 * Subcommunity 21: its unique facet covers the items of collections 211 and 212.
 */
@Test
public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception {
    String scope = String.valueOf(subcommunity21.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))));

    getClient().perform(get("/api/discover/facets/subcommunity21field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("subcommunity21field", "subcommunity21field-item211", 1),
                   FacetValueMatcher.matchEntry("subcommunity21field", "subcommunity21field-item212", 1),
                   FacetValueMatcher.matchEntry("subcommunity21field", "subcommunity21field-mappedItem122211", 1)
               )));
}
/**
 * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for
 * Collection 211: its unique facet covers the owned item plus the item mapped into it.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception {
    String scope = String.valueOf(collection211.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "collection211field", "text"))));

    getClient().perform(get("/api/discover/facets/collection211field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("collection211field", "collection211field-item211", 1),
                   FacetValueMatcher.matchEntry("collection211field", "collection211field-mappedItem122211", 1)
               )));
}
/**
 * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1"
 * is inherited correctly for Collection 212, which has no configuration of its own.
 */
@Test
public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception {
    String scope = String.valueOf(collection212.getID());

    getClient().perform(get("/api/discover/facets").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
               .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
                   FacetEntryMatcher.authorFacet(false),
                   FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))));

    getClient().perform(get("/api/discover/facets/subcommunity21field").param("scope", scope))
               .andExpect(status().isOk())
               .andExpect(jsonPath("$.type", is("discover")))
               .andExpect(jsonPath("$._embedded.values", containsInAnyOrder(
                   FacetValueMatcher.matchEntry("subcommunity21field", "subcommunity21field-item212", 1)
               )));
}
@Test
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Subcommunity 22.
*/
public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
@Test
/**
* Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221.
*/
public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.matchFacet(false, "collection221field", "text")))
);
getClient().perform(get("/api/discover/facets/collection221field")
.param("scope", String.valueOf(collection221.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._embedded.values",
containsInAnyOrder(
FacetValueMatcher.matchEntry("collection221field",
"collection221field-item221", 1)
)
));
}
@Test
/**
* Verify that the default configuration is inherited correctly when no other custom configuration can be inherited
* for Collection 222.
*/
public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception {
getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID())))
.andExpect(status().isOk())
.andExpect(jsonPath("$.type", is("discover")))
.andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets")))
.andExpect(jsonPath("$._embedded.facets", containsInAnyOrder(
FacetEntryMatcher.authorFacet(false),
FacetEntryMatcher.subjectFacet(false),
FacetEntryMatcher.dateIssuedFacet(false),
FacetEntryMatcher.hasContentInOriginalBundleFacet(false),
FacetEntryMatcher.entityTypeFacet(false)
))
);
}
}

View File

@@ -0,0 +1,213 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.impl.client.CloseableHttpClient;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Integration tests for {@link PubmedImportMetadataSourceServiceImpl}
*
* @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com)
*/
public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest {
@Autowired
private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl;
@Autowired
private LiveImportClientImpl liveImportClientImpl;
    /**
     * Import a single record through {@link PubmedImportMetadataSourceServiceImpl#getRecords}
     * with the HTTP layer mocked, and verify the mapped metadata matches the
     * expected fixture built by {@link #getRecords()}.
     *
     * @throws Exception on any unexpected test failure
     */
    @Test
    public void pubmedImportMetadataGetRecordsTest() throws Exception {
        context.turnOffAuthorisationSystem();
        // Keep a reference to the real client so it can be restored in finally{}
        // even if the test fails; the mock is shared via liveImportClientImpl.
        CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
        CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
        // Canned Entrez responses loaded from the test classpath.
        try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml");
            InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) {
            liveImportClientImpl.setHttpClient(httpClient);
            CloseableHttpResponse fetchResponse = mockResponse(
                IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
            CloseableHttpResponse searchResponse = mockResponse(
                IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
            // Consecutive stubbing: the first HTTP call made by the service gets
            // fetchResponse, the second gets searchResponse. NOTE(review): the
            // test depends on the service issuing its requests in exactly this
            // order — confirm against the service implementation if it changes.
            when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
            context.restoreAuthSystemState();
            ArrayList<ImportRecord> collection2match = getRecords();
            Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
            assertEquals(1, recordsImported.size());
            matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
        } finally {
            // Always restore the real HTTP client so other tests are unaffected.
            liveImportClientImpl.setHttpClient(originalHttpClient);
        }
    }
    /**
     * Same scenario as {@link #pubmedImportMetadataGetRecordsTest()} but driven
     * by the second pair of fixture files, matched against {@link #getRecords2()}.
     *
     * @throws Exception on any unexpected test failure
     */
    @Test
    public void pubmedImportMetadataGetRecords2Test() throws Exception {
        context.turnOffAuthorisationSystem();
        // Keep a reference to the real client so it can be restored in finally{}.
        CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient();
        CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class);
        // Canned Entrez responses loaded from the test classpath.
        try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml");
            InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) {
            liveImportClientImpl.setHttpClient(httpClient);
            CloseableHttpResponse fetchResponse = mockResponse(
                IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK");
            CloseableHttpResponse searchResponse = mockResponse(
                IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK");
            // Consecutive stubbing: responses are returned in the order the
            // service issues its HTTP calls (see note in the sibling test).
            when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse);
            context.restoreAuthSystemState();
            ArrayList<ImportRecord> collection2match = getRecords2();
            Collection<ImportRecord> recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1);
            assertEquals(1, recordsImported.size());
            matchRecords(new ArrayList<ImportRecord>(recordsImported), collection2match);
        } finally {
            // Always restore the real HTTP client so other tests are unaffected.
            liveImportClientImpl.setHttpClient(originalHttpClient);
        }
    }
    /**
     * Build the expected {@link ImportRecord} for PMID 36708638 (the article in
     * pubmedimport-search-test.xml), listing every metadatum the Pubmed mapping
     * is expected to produce. The six abstract fragments mirror the labelled
     * AbstractText sections of the fixture.
     *
     * @return a single-element list holding the expected record
     */
    private ArrayList<ImportRecord> getRecords() {
        ArrayList<ImportRecord> records = new ArrayList<>();
        List<MetadatumDTO> metadatums  = new ArrayList<MetadatumDTO>();
        //define first record
        MetadatumDTO title = createMetadatumDTO("dc","title", null,
                "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review.");
        MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize"
                + " the main strategies for teaching clinical reasoning described in the literature in the context of"
                + " advanced clinical practice and promote new areas of research to improve the pedagogical approach"
                + " to clinical reasoning in Advanced Practice Nursing.");
        MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and"
                + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making"
                + " process. The quality improvement of care is related to the development of those skills."
                + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical"
                + " reasoning in advanced clinical practice.");
        MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was"
                + " conducted using the framework developed by Arksey and O'Malley as a research strategy."
                + " Consistent with the nature of scoping reviews, a study protocol has been established.");
        MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and"
                + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary"
                + " revision studies, published in biomedical databases, were selected, including qualitative ones."
                + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID."
                + " Three authors independently evaluated the articles for titles, abstracts, and full text.");
        MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined,"
                + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility,"
                + " and 27 were included in the scoping review. The results that emerged from the review were"
                + " interpreted and grouped into three macro strategies (simulations-based education, art and visual"
                + " thinking, and other learning approaches) and nineteen educational interventions.");
        MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different"
                + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is"
                + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic"
                + " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to"
                + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to"
                + " acquire an adequate level of judgment and critical thinking. Therefore, it will be"
                + " necessary to relate teaching methodologies with the skills developed.");
        // PMID is mapped to dc.identifier.other
        MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638");
        MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia");
        MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana");
        MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola");
        MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina");
        MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia");
        MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica");
        MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02");
        MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
        MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing");
        MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning");
        MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking");
        MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies");
        MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education");
        MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology");
        // NOTE(review): the add order appears significant — it presumably must
        // mirror the order the mapping produces; confirm against matchRecords().
        metadatums.add(title);
        metadatums.add(description1);
        metadatums.add(description2);
        metadatums.add(description3);
        metadatums.add(description4);
        metadatums.add(description5);
        metadatums.add(description6);
        metadatums.add(identifierOther);
        metadatums.add(author1);
        metadatums.add(author2);
        metadatums.add(author3);
        metadatums.add(author4);
        metadatums.add(author5);
        metadatums.add(author6);
        metadatums.add(date);
        metadatums.add(language);
        metadatums.add(subject1);
        metadatums.add(subject2);
        metadatums.add(subject3);
        metadatums.add(subject4);
        metadatums.add(subject5);
        metadatums.add(subject6);
        ImportRecord record = new ImportRecord(metadatums);
        records.add(record);
        return records;
    }
    /**
     * Build the expected {@link ImportRecord} for PMID 21975942 (the article in
     * pubmedimport-search-test2.xml): title, single abstract, identifier,
     * two authors, issue date and language.
     *
     * @return a single-element list holding the expected record
     */
    private ArrayList<ImportRecord> getRecords2() {
        ArrayList<ImportRecord> records = new ArrayList<>();
        List<MetadatumDTO> metadatums  = new ArrayList<MetadatumDTO>();
        //define first record
        MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez.");
        MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely"
                + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez"
                + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between"
                + " the individual entries found in numerous public databases. The existence of such natural"
                + " connections, mostly biological in nature, argued for the development of a method through which"
                + " all the information about a particular biological entity could be found without having to"
                + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based"
                + " searches, illustrating the types of information that can be retrieved through the Entrez system."
                + " An alternate protocol builds upon the first basic protocol, using additional,"
                + " built-in features of the Entrez system, and providing alternative ways to issue the initial query."
                + " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure"
                + " visualization tool, is also discussed.");
        // PMID is mapped to dc.identifier.other
        MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942");
        MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen");
        MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D");
        MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10");
        MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en");
        // Add order presumably mirrors the mapping output order (see getRecords()).
        metadatums.add(title);
        metadatums.add(description);
        metadatums.add(identifierOther);
        metadatums.add(author1);
        metadatums.add(author2);
        metadatums.add(date);
        metadatums.add(language);
        ImportRecord record = new ImportRecord(metadatums);
        records.add(record);
        return records;
    }
}

View File

@@ -8,6 +8,9 @@
package org.dspace.app.rest.matcher; package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL;
import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST;
import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL;
import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
@@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase;
import org.hamcrest.Matcher; import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
/** /**
* Utility class to construct a Matcher for a browse index * Utility class to construct a Matcher for a browse index
@@ -31,7 +33,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> subjectBrowseIndex(final String order) { public static Matcher<? super Object> subjectBrowseIndex(final String order) {
return allOf( return allOf(
hasJsonPath("$.metadata", contains("dc.subject.*")), hasJsonPath("$.metadata", contains("dc.subject.*")),
hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.dataType", equalToIgnoringCase("text")),
hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -44,7 +47,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> titleBrowseIndex(final String order) { public static Matcher<? super Object> titleBrowseIndex(final String order) {
return allOf( return allOf(
hasJsonPath("$.metadata", contains("dc.title")), hasJsonPath("$.metadata", contains("dc.title")),
hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.dataType", equalToIgnoringCase("title")),
hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -56,7 +60,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> contributorBrowseIndex(final String order) { public static Matcher<? super Object> contributorBrowseIndex(final String order) {
return allOf( return allOf(
hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")),
hasJsonPath("$.metadataBrowse", Matchers.is(true)), hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.dataType", equalToIgnoringCase("text")),
hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -69,7 +74,8 @@ public class BrowseIndexMatcher {
public static Matcher<? super Object> dateIssuedBrowseIndex(final String order) { public static Matcher<? super Object> dateIssuedBrowseIndex(final String order) {
return allOf( return allOf(
hasJsonPath("$.metadata", contains("dc.date.issued")), hasJsonPath("$.metadata", contains("dc.date.issued")),
hasJsonPath("$.metadataBrowse", Matchers.is(false)), hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.dataType", equalToIgnoringCase("date")),
hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.order", equalToIgnoringCase(order)),
hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")),
@@ -77,4 +83,22 @@ public class BrowseIndexMatcher {
hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items")) hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items"))
); );
} }
public static Matcher<? super Object> hierarchicalBrowseIndex(final String vocabulary) {
return allOf(
hasJsonPath("$.metadata", contains("dc.subject")),
hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)),
hasJsonPath("$.type", equalToIgnoringCase("browse")),
hasJsonPath("$.facetType", equalToIgnoringCase("subject")),
hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)),
hasJsonPath("$._links.vocabulary.href",
is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))),
hasJsonPath("$._links.items.href",
is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))),
hasJsonPath("$._links.entries.href",
is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))),
hasJsonPath("$._links.self.href",
is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary)))
);
}
} }

View File

@@ -110,6 +110,17 @@ public class FacetEntryMatcher {
); );
} }
public static Matcher<? super Object> matchFacet(boolean hasNext, String name, String facetType) {
return allOf(
hasJsonPath("$.name", is(name)),
hasJsonPath("$.facetType", is(facetType)),
hasJsonPath("$.facetLimit", any(Integer.class)),
hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)),
hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name))
);
}
/** /**
* Check that a facet over the dc.type exists and match the default configuration * Check that a facet over the dc.type exists and match the default configuration
* *

View File

@@ -60,6 +60,16 @@ public class FacetValueMatcher {
); );
} }
public static Matcher<? super Object> matchEntry(String facet, String label, int count) {
return allOf(
hasJsonPath("$.label", is(label)),
hasJsonPath("$.type", is("discover")),
hasJsonPath("$.count", is(count)),
hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")),
hasJsonPath("$._links.search.href", containsString("f." + facet + "=" + label + ",equals"))
);
}
public static Matcher<? super Object> entrySubjectWithAuthority(String label, String authority, int count) { public static Matcher<? super Object> entrySubjectWithAuthority(String label, String authority, int count) {
return allOf( return allOf(

View File

@@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest {
sortConfiguration.setSortFields(listSortField); sortConfiguration.setSortFields(listSortField);
sortConfiguration.setDefaultSortField(defaultSort);
discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); discoveryConfiguration.setSearchSortConfiguration(sortConfiguration);
DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet();
@@ -167,6 +169,16 @@ public class RestDiscoverQueryBuilderTest {
page.getOffset(), "SCORE", "ASC"); page.getOffset(), "SCORE", "ASC");
} }
@Test
public void testSortByDefaultSortField() throws Exception {
page = PageRequest.of(2, 10);
restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page);
verify(discoverQueryBuilder, times(1))
.buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(),
page.getPageSize(), page.getOffset(), null, null);
}
@Test(expected = DSpaceBadRequestException.class) @Test(expected = DSpaceBadRequestException.class)
public void testCatchIllegalArgumentException() throws Exception { public void testCatchIllegalArgumentException() throws Exception {
when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(),

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE eSearchResult PUBLIC "-//NLM//DTD esearch 20060628//EN" "https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd">
<eSearchResult>
<Count>1</Count>
<RetMax>1</RetMax>
<RetStart>0</RetStart>
<QueryKey>1</QueryKey>
<WebEnv>MCID_64784b5ab65e3b2b2253cd3a</WebEnv>
<IdList>
<Id>36708638</Id>
</IdList>
<TranslationSet/>
<QueryTranslation>"10 1016 j nepr 2023 103548"[All Fields]</QueryTranslation>
</eSearchResult>

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE eSearchResult PUBLIC "-//NLM//DTD esearch 20060628//EN" "https://eutils.ncbi.nlm.nih.gov/eutils/dtd/20060628/esearch.dtd">
<eSearchResult>
<Count>1</Count>
<RetMax>1</RetMax>
<RetStart>0</RetStart>
<QueryKey>1</QueryKey>
<WebEnv>MCID_64784b12ccf058150336d6a8</WebEnv>
<IdList>
<Id>21975942</Id>
</IdList>
<TranslationSet/>
<QueryTranslation>"10 1002 0471142905 hg0610s71"[All Fields]</QueryTranslation>
</eSearchResult>

View File

@@ -0,0 +1,194 @@
<?xml version="1.0" ?>
<!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2023//EN" "https://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_230101.dtd">
<PubmedArticleSet>
<PubmedArticle>
<MedlineCitation Status="MEDLINE" Owner="NLM" IndexingMethod="Automated">
<PMID Version="1">36708638</PMID>
<DateCompleted>
<Year>2023</Year>
<Month>02</Month>
<Day>23</Day>
</DateCompleted>
<DateRevised>
<Year>2023</Year>
<Month>02</Month>
<Day>23</Day>
</DateRevised>
<Article PubModel="Print-Electronic">
<Journal>
<ISSN IssnType="Electronic">1873-5223</ISSN>
<JournalIssue CitedMedium="Internet">
<Volume>67</Volume>
<PubDate>
<Year>2023</Year>
<Month>Feb</Month>
</PubDate>
</JournalIssue>
<Title>Nurse education in practice</Title>
<ISOAbbreviation>Nurse Educ Pract</ISOAbbreviation>
</Journal>
<ArticleTitle>Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review.</ArticleTitle>
<Pagination>
<StartPage>103548</StartPage>
<MedlinePgn>103548</MedlinePgn>
</Pagination>
<ELocationID EIdType="doi" ValidYN="Y">10.1016/j.nepr.2023.103548</ELocationID>
<ELocationID EIdType="pii" ValidYN="Y">S1471-5953(23)00010-0</ELocationID>
<Abstract>
<AbstractText Label="AIM/OBJECTIVE" NlmCategory="OBJECTIVE">To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing.</AbstractText>
<AbstractText Label="BACKGROUND" NlmCategory="BACKGROUND">Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice.</AbstractText>
<AbstractText Label="DESIGN" NlmCategory="METHODS">A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established.</AbstractText>
<AbstractText Label="METHODS" NlmCategory="METHODS">The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text.</AbstractText>
<AbstractText Label="RESULTS" NlmCategory="RESULTS">1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions.</AbstractText>
<AbstractText Label="CONCLUSIONS" NlmCategory="CONCLUSIONS">Among the different strategies, the simulations are the most used. Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed.</AbstractText>
<CopyrightInformation>Copyright &#xa9; 2023 Elsevier Ltd. All rights reserved.</CopyrightInformation>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
<LastName>Giuffrida</LastName>
<ForeName>Silvia</ForeName>
<Initials>S</Initials>
<AffiliationInfo>
<Affiliation>Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Silano</LastName>
<ForeName>Verdiana</ForeName>
<Initials>V</Initials>
<AffiliationInfo>
<Affiliation>Nursing Direction of Settore Anziani Citt&#xe0; di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Ramacciati</LastName>
<ForeName>Nicola</ForeName>
<Initials>N</Initials>
<AffiliationInfo>
<Affiliation>Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Prandi</LastName>
<ForeName>Cesarina</ForeName>
<Initials>C</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Baldon</LastName>
<ForeName>Alessia</ForeName>
<Initials>A</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
<Author ValidYN="Y">
<LastName>Bianchi</LastName>
<ForeName>Monica</ForeName>
<Initials>M</Initials>
<AffiliationInfo>
<Affiliation>Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch.</Affiliation>
</AffiliationInfo>
</Author>
</AuthorList>
<Language>eng</Language>
<PublicationTypeList>
<PublicationType UI="D016428">Journal Article</PublicationType>
<PublicationType UI="D016454">Review</PublicationType>
</PublicationTypeList>
<ArticleDate DateType="Electronic">
<Year>2023</Year>
<Month>01</Month>
<Day>17</Day>
</ArticleDate>
</Article>
<MedlineJournalInfo>
<Country>Scotland</Country>
<MedlineTA>Nurse Educ Pract</MedlineTA>
<NlmUniqueID>101090848</NlmUniqueID>
<ISSNLinking>1471-5953</ISSNLinking>
</MedlineJournalInfo>
<CitationSubset>IM</CitationSubset>
<MeshHeadingList>
<MeshHeading>
<DescriptorName UI="D006801" MajorTopicYN="N">Humans</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D057179" MajorTopicYN="Y">Advanced Practice Nursing</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D007858" MajorTopicYN="N">Learning</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D003479" MajorTopicYN="N">Curriculum</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013850" MajorTopicYN="N">Thinking</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D000086723" MajorTopicYN="N">Clinical Reasoning</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D013338" MajorTopicYN="Y">Students, Nursing</DescriptorName>
</MeshHeading>
</MeshHeadingList>
<KeywordList Owner="NOTNLM">
<Keyword MajorTopicYN="N">Advanced practice nursing</Keyword>
<Keyword MajorTopicYN="N">Clinical reasoning</Keyword>
<Keyword MajorTopicYN="N">Critical thinking</Keyword>
<Keyword MajorTopicYN="N">Educational strategies</Keyword>
<Keyword MajorTopicYN="N">Nursing education</Keyword>
<Keyword MajorTopicYN="N">Teaching methodology</Keyword>
</KeywordList>
<CoiStatement>Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper.</CoiStatement>
</MedlineCitation>
<PubmedData>
<History>
<PubMedPubDate PubStatus="received">
<Year>2022</Year>
<Month>11</Month>
<Day>9</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="revised">
<Year>2022</Year>
<Month>12</Month>
<Day>17</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="accepted">
<Year>2023</Year>
<Month>1</Month>
<Day>10</Day>
</PubMedPubDate>
<PubMedPubDate PubStatus="pubmed">
<Year>2023</Year>
<Month>1</Month>
<Day>29</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="medline">
<Year>2023</Year>
<Month>2</Month>
<Day>25</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="entrez">
<Year>2023</Year>
<Month>1</Month>
<Day>28</Day>
<Hour>18</Hour>
<Minute>7</Minute>
</PubMedPubDate>
</History>
<PublicationStatus>ppublish</PublicationStatus>
<ArticleIdList>
<ArticleId IdType="pubmed">36708638</ArticleId>
<ArticleId IdType="doi">10.1016/j.nepr.2023.103548</ArticleId>
<ArticleId IdType="pii">S1471-5953(23)00010-0</ArticleId>
</ArticleIdList>
</PubmedData>
</PubmedArticle>
</PubmedArticleSet>

View File

@@ -0,0 +1,132 @@
<?xml version="1.0" ?>
<!DOCTYPE PubmedArticleSet PUBLIC "-//NLM//DTD PubMedArticle, 1st January 2023//EN" "https://dtd.nlm.nih.gov/ncbi/pubmed/out/pubmed_230101.dtd">
<PubmedArticleSet>
<PubmedArticle>
<MedlineCitation Status="MEDLINE" Owner="NLM">
<PMID Version="1">21975942</PMID>
<DateCompleted>
<Year>2012</Year>
<Month>01</Month>
<Day>13</Day>
</DateCompleted>
<DateRevised>
<Year>2016</Year>
<Month>10</Month>
<Day>21</Day>
</DateRevised>
<Article PubModel="Print">
<Journal>
<ISSN IssnType="Electronic">1934-8258</ISSN>
<JournalIssue CitedMedium="Internet">
<Volume>Chapter 6</Volume>
<PubDate>
<Year>2011</Year>
<Month>Oct</Month>
</PubDate>
</JournalIssue>
<Title>Current protocols in human genetics</Title>
<ISOAbbreviation>Curr Protoc Hum Genet</ISOAbbreviation>
</Journal>
<ArticleTitle>Searching NCBI Databases Using Entrez.</ArticleTitle>
<Pagination>
<StartPage>Unit6.10</StartPage>
<MedlinePgn>Unit6.10</MedlinePgn>
</Pagination>
<ELocationID EIdType="doi" ValidYN="Y">10.1002/0471142905.hg0610s71</ELocationID>
<Abstract>
<AbstractText>One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed.</AbstractText>
<CopyrightInformation>&#xa9; 2011 by John Wiley &amp; Sons, Inc.</CopyrightInformation>
</Abstract>
<AuthorList CompleteYN="Y">
<Author ValidYN="Y">
<LastName>Gibney</LastName>
<ForeName>Gretchen</ForeName>
<Initials>G</Initials>
</Author>
<Author ValidYN="Y">
<LastName>Baxevanis</LastName>
<ForeName>Andreas D</ForeName>
<Initials>AD</Initials>
</Author>
</AuthorList>
<Language>eng</Language>
<PublicationTypeList>
<PublicationType UI="D016428">Journal Article</PublicationType>
</PublicationTypeList>
</Article>
<MedlineJournalInfo>
<Country>United States</Country>
<MedlineTA>Curr Protoc Hum Genet</MedlineTA>
<NlmUniqueID>101287858</NlmUniqueID>
<ISSNLinking>1934-8258</ISSNLinking>
</MedlineJournalInfo>
<CitationSubset>IM</CitationSubset>
<MeshHeadingList>
<MeshHeading>
<DescriptorName UI="D000818" MajorTopicYN="N">Animals</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D003628" MajorTopicYN="N">Database Management Systems</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D016208" MajorTopicYN="Y">Databases, Factual</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D006801" MajorTopicYN="N">Humans</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D016247" MajorTopicYN="N">Information Storage and Retrieval</DescriptorName>
<QualifierName UI="Q000379" MajorTopicYN="Y">methods</QualifierName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D020407" MajorTopicYN="Y">Internet</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D008968" MajorTopicYN="N">Molecular Conformation</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D009317" MajorTopicYN="N">National Library of Medicine (U.S.)</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D039781" MajorTopicYN="N">PubMed</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D014481" MajorTopicYN="N" Type="Geographic">United States</DescriptorName>
</MeshHeading>
<MeshHeading>
<DescriptorName UI="D014584" MajorTopicYN="Y">User-Computer Interface</DescriptorName>
</MeshHeading>
</MeshHeadingList>
</MedlineCitation>
<PubmedData>
<History>
<PubMedPubDate PubStatus="entrez">
<Year>2011</Year>
<Month>10</Month>
<Day>7</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="pubmed">
<Year>2011</Year>
<Month>10</Month>
<Day>7</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
<PubMedPubDate PubStatus="medline">
<Year>2012</Year>
<Month>1</Month>
<Day>14</Day>
<Hour>6</Hour>
<Minute>0</Minute>
</PubMedPubDate>
</History>
<PublicationStatus>ppublish</PublicationStatus>
<ArticleIdList>
<ArticleId IdType="pubmed">21975942</ArticleId>
<ArticleId IdType="doi">10.1002/0471142905.hg0610s71</ArticleId>
</ArticleIdList>
</PubmedData>
</PubmedArticle>
</PubmedArticleSet>

View File

@@ -25,6 +25,10 @@ rest.projections.full.max = 2
# This property determines the max embed depth for a SpecificLevelProjection # This property determines the max embed depth for a SpecificLevelProjection
rest.projection.specificLevel.maxEmbed = 5 rest.projection.specificLevel.maxEmbed = 5
# This property determines the maximum number of REST operations that can be performed at the same time, for example when
# batch removing bitstreams. The default value is set to 1000.
rest.patch.operations.limit = 1000
# Define which configuration properties are exposed through the http://<dspace.server.url>/api/config/properties/ # Define which configuration properties are exposed through the http://<dspace.server.url>/api/config/properties/
# rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will # rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will
# respond that the property wasn't found. This property can be defined multiple times to allow access to multiple # respond that the property wasn't found. This property can be defined multiple times to allow access to multiple

View File

@@ -30,6 +30,7 @@
<entry key-ref="crossref.volume" value-ref="crossrefVolume" /> <entry key-ref="crossref.volume" value-ref="crossrefVolume" />
<entry key-ref="crossref.issue" value-ref="crossrefIssue" /> <entry key-ref="crossref.issue" value-ref="crossrefIssue" />
<entry key-ref="crossref.abstract" value-ref="crossrefAbstract" /> <entry key-ref="crossref.abstract" value-ref="crossrefAbstract" />
<entry key-ref="crossref.publisher" value-ref="crossrefPublisher" />
</util:map> </util:map>
<bean id="crossrefIDContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleJsonPathMetadataContributor"> <bean id="crossrefIDContrib" class="org.dspace.importer.external.metadatamapping.contributor.SimpleJsonPathMetadataContributor">
@@ -137,6 +138,14 @@
<constructor-arg value="dc.description.abstract"/> <constructor-arg value="dc.description.abstract"/>
</bean> </bean>
<bean id="crossrefPublisher" class="org.dspace.importer.external.metadatamapping.contributor.SimpleJsonPathMetadataContributor">
<property name="field" ref="crossref.publisher"/>
<property name="query" value="/publisher"/>
</bean>
<bean id="crossref.publisher" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">
<constructor-arg value="dc.publisher"/>
</bean>
<bean class="java.lang.Integer" id="maxRetry"> <bean class="java.lang.Integer" id="maxRetry">
<constructor-arg value="3"/> <constructor-arg value="3"/>
</bean> </bean>

View File

@@ -57,9 +57,6 @@
<!-- Same as the "default" configuration, but does NOT filter out older versions of items --> <!-- Same as the "default" configuration, but does NOT filter out older versions of items -->
<!-- Used to display related items on single-item pages, because a relationship does not always point to the latest version of the related item --> <!-- Used to display related items on single-item pages, because a relationship does not always point to the latest version of the related item -->
<entry key="default-relationships" value-ref="defaultRelationshipsConfiguration" /> <entry key="default-relationships" value-ref="defaultRelationshipsConfiguration" />
<!--Use site to override the default configuration for the home page & default discovery page-->
<entry key="site" value-ref="homepageConfiguration" />
<!--<entry key="123456789/7621" value-ref="defaultConfiguration"/>--> <!--<entry key="123456789/7621" value-ref="defaultConfiguration"/>-->
<!-- Used to show filters and results on MyDSpace --> <!-- Used to show filters and results on MyDSpace -->
<!-- Do not change the id of special entries or else they won't work --> <!-- Do not change the id of special entries or else they won't work -->
@@ -75,7 +72,6 @@
<entry key="undiscoverable" value-ref="unDiscoverableItems" /> <entry key="undiscoverable" value-ref="unDiscoverableItems" />
<entry key="administrativeView" value-ref="administrativeView" /> <entry key="administrativeView" value-ref="administrativeView" />
<entry key="administrativeBulkAccess" value-ref="administrativeBulkAccessConfiguration" />
<entry key="publication" value-ref="publication"/> <entry key="publication" value-ref="publication"/>
<!-- Same as the "publication" configuration, but does NOT filter out older versions of items --> <!-- Same as the "publication" configuration, but does NOT filter out older versions of items -->
@@ -726,262 +722,6 @@
<property name="spellCheckEnabled" value="true"/> <property name="spellCheckEnabled" value="true"/>
</bean> </bean>
<!--The configuration settings for discovery of withdrawn and undiscoverable items (admin only) and regular items-->
<bean id="administrativeBulkAccessConfiguration" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="administrativeBulkAccess"/>
<property name="indexAlways" value="true"/>
<property name="sidebarFacets">
<list>
<ref bean="searchFilterObjectType" />
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterEntityType"/>
</list>
</property>
<!-- Set TagCloud configuration per discovery configuration -->
<property name="tagCloudFacetConfiguration" ref="defaultTagCloudFacetConfiguration"/>
<!--The search filters which can be used on the discovery search page-->
<property name="searchFilters">
<list>
<ref bean="searchFilterObjectType" />
<ref bean="searchFilterTitle" />
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterFileNameInOriginalBundle" />
<ref bean="searchFilterFileDescriptionInOriginalBundle" />
<ref bean="searchFilterEntityType"/>
<ref bean="searchFilterIsAuthorOfPublicationRelation"/>
<ref bean="searchFilterIsProjectOfPublicationRelation"/>
<ref bean="searchFilterIsOrgUnitOfPublicationRelation"/>
<ref bean="searchFilterIsPublicationOfJournalIssueRelation"/>
<ref bean="searchFilterIsJournalOfPublicationRelation"/>
</list>
</property>
<!--The sort filters for the discovery search-->
<property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<property name="sortFields">
<list>
<ref bean="sortScore" />
<ref bean="sortTitle" />
<ref bean="sortDateIssued" />
<ref bean="sortDateAccessioned"/>
</list>
</property>
</bean>
</property>
<!--Any default filter queries, these filter queries will be used for all
queries done by discovery for this configuration -->
<property name="defaultFilterQueries">
<list>
<!--Only find items, communities and collections-->
<value>(search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community</value>
<value>-withdrawn:true AND -discoverable:false</value>
</list>
</property>
<!--The configuration for the recent submissions-->
<property name="recentSubmissionConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryRecentSubmissionsConfiguration">
<property name="metadataSortField" value="dc.date.accessioned" />
<property name="type" value="date"/>
<property name="max" value="20"/>
<!-- If enabled the collection home page will not display metadata but show a pageable list of recent submissions -->
<property name="useAsHomePage" value="false"/>
</bean>
</property>
<!--Default result per page -->
<property name="defaultRpp" value="10" />
<property name="hitHighlightingConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightingConfiguration">
<property name="metadataFields">
<list>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.contributor.author"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dspace.entity.type"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="person.identifier.jobtitle"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.description.abstract"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="2"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.title"/>
<property name="snippets" value="5"/>
</bean>
<!-- By default, full text snippets are disabled, as snippets of embargoed/restricted bitstreams
may appear in search results when the Item is public. See DS-3498
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.status"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.description"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="5"/>
</bean>
-->
</list>
</property>
</bean>
</property>
<property name="moreLikeThisConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryMoreLikeThisConfiguration">
<property name="similarityMetadataFields">
<list>
<value>dc.title</value>
<value>dc.contributor.author</value>
<value>dc.creator</value>
<value>dc.subject</value>
</list>
</property>
<!--The minimum number of matching terms across the metadata fields above before an item is found as related -->
<property name="minTermFrequency" value="5"/>
<!--The maximum number of related items displayed-->
<property name="max" value="3"/>
<!--The minimum word length below which words will be ignored-->
<property name="minWordLength" value="5"/>
</bean>
</property>
<!-- When true a "did you mean" example will be displayed, value can be true or false -->
<property name="spellCheckEnabled" value="true"/>
</bean>
<!--The Homepage specific configuration settings for discovery-->
<bean id="homepageConfiguration" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<!--Which sidebar facets are to be displayed (same as defaultConfiguration above)-->
<property name="sidebarFacets">
<list>
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterEntityType"/>
</list>
</property>
<!-- Set TagCloud configuration per discovery configuration -->
<property name="tagCloudFacetConfiguration" ref="homepageTagCloudFacetConfiguration"/>
<!--The search filters which can be used on the discovery search page (same as defaultConfiguration above)-->
<property name="searchFilters">
<list>
<ref bean="searchFilterTitle" />
<ref bean="searchFilterAuthor" />
<ref bean="searchFilterSubject" />
<ref bean="searchFilterIssued" />
<ref bean="searchFilterContentInOriginalBundle"/>
<ref bean="searchFilterFileNameInOriginalBundle" />
<ref bean="searchFilterFileDescriptionInOriginalBundle" />
<ref bean="searchFilterEntityType"/>
<ref bean="searchFilterIsAuthorOfPublicationRelation"/>
<ref bean="searchFilterIsProjectOfPublicationRelation"/>
<ref bean="searchFilterIsOrgUnitOfPublicationRelation"/>
<ref bean="searchFilterIsPublicationOfJournalIssueRelation"/>
<ref bean="searchFilterIsJournalOfPublicationRelation"/>
</list>
</property>
<!--The sort filters for the discovery search (same as defaultConfiguration above)-->
<property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<property name="sortFields">
<list>
<ref bean="sortScore" />
<ref bean="sortTitle" />
<ref bean="sortDateIssued" />
<ref bean="sortDateAccessioned" />
</list>
</property>
</bean>
</property>
<!--Any default filter queries, these filter queries will be used for all
queries done by discovery for this configuration-->
<property name="defaultFilterQueries">
<list>
<!--Only find items, communities and collections-->
<value>(search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community</value>
<value>-withdrawn:true AND -discoverable:false</value>
</list>
</property>
<!-- Limit recent submissions on homepage to only 5 (default is 20) -->
<property name="recentSubmissionConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryRecentSubmissionsConfiguration">
<property name="metadataSortField" value="dc.date.accessioned" />
<property name="type" value="date"/>
<property name="max" value="5"/>
<property name="useAsHomePage" value="false"/>
</bean>
</property>
<property name="hitHighlightingConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightingConfiguration">
<property name="metadataFields">
<list>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.contributor.author"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dspace.entity.type"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="person.identifier.jobtitle"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.description.abstract"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="2"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="dc.title"/>
<property name="snippets" value="5"/>
</bean>
<!-- By default, full text snippets are disabled, as snippets of embargoed/restricted bitstreams
may appear in search results when the Item is public. See DS-3498
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="project.identifier.status"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.name"/>
<property name="snippets" value="5"/>
</bean>
<bean class="org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration">
<property name="field" value="orgunit.identifier.description"/>
<property name="maxSize" value="250"/>
<property name="snippets" value="5"/>
</bean>
-->
</list>
</property>
</bean>
</property>
<!-- When true a "did you mean" example will be displayed, value can be true or false -->
<property name="spellCheckEnabled" value="true"/>
</bean>
<!--The workspace configuration settings for discovery --> <!--The workspace configuration settings for discovery -->
<bean id="workspaceConfiguration" <bean id="workspaceConfiguration"
@@ -1007,8 +747,11 @@
<!--The sort filters for the discovery search--> <!--The sort filters for the discovery search-->
<property name="searchSortConfiguration"> <property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration"> <bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<!--The default sort filter to use for the initial workspace loading-->
<property name="defaultSortField" ref="sortLastModified" />
<property name="sortFields"> <property name="sortFields">
<list> <list>
<ref bean="sortLastModified" />
<ref bean="sortScore" /> <ref bean="sortScore" />
<ref bean="sortTitle" /> <ref bean="sortTitle" />
<ref bean="sortDateIssued" /> <ref bean="sortDateIssued" />
@@ -1080,6 +823,8 @@
<!--The sort filters for the discovery search--> <!--The sort filters for the discovery search-->
<property name="searchSortConfiguration"> <property name="searchSortConfiguration">
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration"> <bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<!--The default sort filter to use for the initial workspace loading-->
<property name="defaultSortField" ref="sortLastModified" />
<property name="sortFields"> <property name="sortFields">
<list> <list>
<ref bean="sortLastModified" /> <ref bean="sortLastModified" />
@@ -1157,6 +902,7 @@
<bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration"> <bean class="org.dspace.discovery.configuration.DiscoverySortConfiguration">
<property name="sortFields"> <property name="sortFields">
<list> <list>
<ref bean="sortLastModified" />
<ref bean="sortScore" /> <ref bean="sortScore" />
<ref bean="sortTitle" /> <ref bean="sortTitle" />
<ref bean="sortDateIssued" /> <ref bean="sortDateIssued" />
@@ -1424,7 +1170,7 @@
</bean> </bean>
<bean id="publicationRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype"> <bean id="publicationRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="publication"/> <property name="id" value="publicationRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -1557,7 +1303,7 @@
</bean> </bean>
<bean id="personRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype"> <bean id="personRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="person"/> <property name="id" value="personRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -1674,7 +1420,7 @@
</bean> </bean>
<bean id="projectRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype"> <bean id="projectRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" scope="prototype">
<property name="id" value="project"/> <property name="id" value="projectRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -1794,7 +1540,7 @@
<bean id="orgUnitRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" <bean id="orgUnitRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype"> scope="prototype">
<property name="id" value="orgUnit"/> <property name="id" value="orgUnitRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -1919,7 +1665,7 @@
<bean id="journalIssueRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" <bean id="journalIssueRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype"> scope="prototype">
<property name="id" value="journalIssue"/> <property name="id" value="journalIssueRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -2039,7 +1785,7 @@
<bean id="journalVolumeRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" <bean id="journalVolumeRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype"> scope="prototype">
<property name="id" value="journalVolume"/> <property name="id" value="journalVolumeRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -2158,7 +1904,7 @@
<bean id="journalRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration" <bean id="journalRelationships" class="org.dspace.discovery.configuration.DiscoveryConfiguration"
scope="prototype"> scope="prototype">
<property name="id" value="journal"/> <property name="id" value="journalRelationships"/>
<property name="indexAlways" value="true"/> <property name="indexAlways" value="true"/>
<!--Which sidebar facets are to be displayed--> <!--Which sidebar facets are to be displayed-->
<property name="sidebarFacets"> <property name="sidebarFacets">
@@ -2374,18 +2120,6 @@
</bean> </bean>
<!--TagCloud configuration bean for homepage discovery configuration-->
<bean id="homepageTagCloudFacetConfiguration" class="org.dspace.discovery.configuration.TagCloudFacetConfiguration">
<!-- Actual configuration of the tagcloud (colors, sorting, etc.) -->
<property name="tagCloudConfiguration" ref="tagCloudConfiguration"/>
<!-- List of tagclouds to appear, one for every search filter, one after the other -->
<property name="tagCloudFacets">
<list>
<ref bean="searchFilterSubject" />
</list>
</property>
</bean>
<!--TagCloud configuration bean for default discovery configuration--> <!--TagCloud configuration bean for default discovery configuration-->
<bean id="defaultTagCloudFacetConfiguration" class="org.dspace.discovery.configuration.TagCloudFacetConfiguration"> <bean id="defaultTagCloudFacetConfiguration" class="org.dspace.discovery.configuration.TagCloudFacetConfiguration">
<!-- Actual configuration of the tagcloud (colors, sorting, etc.) --> <!-- Actual configuration of the tagcloud (colors, sorting, etc.) -->
@@ -2666,7 +2400,6 @@
<bean id="searchFilterObjectType" <bean id="searchFilterObjectType"
class="org.dspace.discovery.configuration.DiscoverySearchFilterFacet"> class="org.dspace.discovery.configuration.DiscoverySearchFilterFacet">
<property name="indexFieldName" value="resourcetype" /> <property name="indexFieldName" value="resourcetype" />
<property name="type" value="authority" />
<property name="metadataFields"> <property name="metadataFields">
<list> <list>
<value>placeholder.placeholder.placeholder</value> <value>placeholder.placeholder.placeholder</value>

View File

@@ -283,6 +283,7 @@
<!-- used by the DSpace Discovery Solr Indexer to track the last time a document was indexed --> <!-- used by the DSpace Discovery Solr Indexer to track the last time a document was indexed -->
<field name="SolrIndexer.lastIndexed" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" /> <field name="SolrIndexer.lastIndexed" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" />
<field name="lastModified" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" /> <field name="lastModified" type="date" indexed="true" stored="true" default="NOW" multiValued="false" omitNorms="true" />
<copyField source="lastModified" dest="lastModified_dt" />
<!-- used to filter out items that are older versions of another item --> <!-- used to filter out items that are older versions of another item -->
<field name="latestVersion" type="boolean" indexed="true" stored="true" default="true" multiValued="false" omitNorms="true"/> <field name="latestVersion" type="boolean" indexed="true" stored="true" default="true" multiValued="false" omitNorms="true"/>