Merge remote-tracking branch '4science-bitbucket/main' into CST-5587

Luca Giamminonni
2022-06-17 13:28:31 +02:00
159 changed files with 22624 additions and 1598 deletions

View File

@@ -550,10 +550,17 @@
      <groupId>com.ibm.icu</groupId>
      <artifactId>icu4j</artifactId>
   </dependency>
+  <!-- Codebase at https://github.com/OCLC-Research/oaiharvester2/ -->
   <dependency>
      <groupId>org.dspace</groupId>
      <artifactId>oclc-harvester2</artifactId>
   </dependency>
+  <!-- Xalan is REQUIRED by 'oclc-harvester2' listed above (OAI harvesting fails without it).
+       Please do NOT use Xalan in DSpace codebase as it is not well maintained. -->
+  <dependency>
+     <groupId>xalan</groupId>
+     <artifactId>xalan</artifactId>
+  </dependency>
   <dependency>
      <groupId>org.dspace</groupId>
      <artifactId>dspace-services</artifactId>

View File

@@ -0,0 +1,170 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
private static final String EXPORT_CSV = "exportCSV";
private boolean help = false;
private String identifier;
private String discoveryConfigName;
private String[] filterQueryStrings;
private boolean hasScope = false;
private String query;
private SearchService searchService;
private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
private EPersonService ePersonService;
private DiscoveryConfigurationService discoveryConfigurationService;
private CommunityService communityService;
private CollectionService collectionService;
private DiscoverQueryBuilder queryBuilder;
@Override
public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
searchService = SearchUtils.getSearchService();
metadataDSpaceCsvExportService = new DSpace().getServiceManager()
.getServiceByName(
MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
MetadataDSpaceCsvExportService.class
);
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
discoveryConfigurationService = SearchUtils.getConfigurationService();
communityService = ContentServiceFactory.getInstance().getCommunityService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
queryBuilder = SearchUtils.getQueryBuilder();
if (commandLine.hasOption('h')) {
help = true;
return;
}
if (commandLine.hasOption('q')) {
query = commandLine.getOptionValue('q');
}
if (commandLine.hasOption('s')) {
hasScope = true;
identifier = commandLine.getOptionValue('s');
}
if (commandLine.hasOption('c')) {
discoveryConfigName = commandLine.getOptionValue('c');
}
if (commandLine.hasOption('f')) {
filterQueryStrings = commandLine.getOptionValues('f');
}
}
@Override
public void internalRun() throws Exception {
if (help) {
loghelpinfo();
printHelp();
return;
}
handler.logDebug("starting search export");
IndexableObject dso = null;
Context context = new Context();
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
if (hasScope) {
dso = resolveScope(context, identifier);
}
DiscoveryConfiguration discoveryConfiguration =
discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);
List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();
handler.logDebug("processing filter queries");
if (filterQueryStrings != null) {
for (String filterQueryString: filterQueryStrings) {
String field = filterQueryString.split(",", 2)[0];
String operator = filterQueryString.split("(,|=)", 3)[1];
String value = filterQueryString.split("=", 2)[1];
QueryBuilderSearchFilter queryBuilderSearchFilter =
new QueryBuilderSearchFilter(field, operator, value);
queryBuilderSearchFilters.add(queryBuilderSearchFilter);
}
}
handler.logDebug("building query");
DiscoverQuery discoverQuery =
queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
"Item", 10, Long.getLong("0"), null, SortOption.DESCENDING);
handler.logDebug("creating iterator");
Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
handler.logDebug("creating dspacecsv");
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
handler.logDebug("writing to file " + getFileNameOrExportFile());
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
}
protected void loghelpinfo() {
handler.logInfo("metadata-export");
}
protected String getFileNameOrExportFile() {
return "metadataExportSearch.csv";
}
public IndexableObject resolveScope(Context context, String id) throws SQLException {
UUID uuid = UUID.fromString(id);
IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
if (scopeObj.getIndexedObject() == null) {
scopeObj = new IndexableCollection(collectionService.find(context, uuid));
}
return scopeObj;
}
}
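
To make the -f handling above concrete, here is a small standalone sketch (example values only, not part of the commit) showing how a filter string is split into field, operator and value by the same split(...) calls used in internalRun():

public class FilterStringParseExample {
    // Example values only; mirrors the parsing in MetadataExportSearch.internalRun().
    public static void main(String[] args) {
        String filterQueryString = "title,contains=sample text";
        String field = filterQueryString.split(",", 2)[0];        // "title"
        String operator = filterQueryString.split("(,|=)", 3)[1]; // "contains"
        String value = filterQueryString.split("=", 2)[1];        // "sample text"
        System.out.println(field + " / " + operator + " / " + value);
    }
}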

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {
@Override
protected String getFileNameOrExportFile() {
return commandLine.getOptionValue('n');
}
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("n", "filename", true, "the filename to export to");
return options;
}
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableclass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableclass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableclass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
return true;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("q", "query", true,
"The discovery search string to will be used to match records. Not URL encoded");
options.getOption("q").setType(String.class);
options.addOption("s", "scope", true,
"UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
"limited");
options.getOption("s").setType(String.class);
options.addOption("c", "configuration", true,
"The name of a Discovery configuration that should be used by this search");
options.getOption("c").setType(String.class);
options.addOption("f", "filter", true,
"Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
"<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
"authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
options.getOption("f").setType(String.class);
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}
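
As a rough illustration of how these options are consumed, the hedged sketch below uses plain commons-cli; the option letters and the -f syntax come from the configuration above, while the parser wiring and sample values are assumptions (the real script receives its CommandLine from the DSpace script framework):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class MetadataExportSearchOptionsExample {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("q", "query", true, "discovery search string");
        options.addOption("s", "scope", true, "container UUID");
        options.addOption("c", "configuration", true, "discovery configuration name");
        options.addOption("f", "filter", true, "filter, syntax <:filter-name>,<:filter-operator>=<:filter-value>");
        options.addOption("h", "help", false, "help");
        // Sample arguments (illustrative values only)
        String[] sampleArgs = {"-q", "dark matter", "-f", "title,contains=sample text"};
        CommandLine commandLine = new DefaultParser().parse(options, sampleArgs);
        System.out.println(commandLine.getOptionValue('q'));     // dark matter
        for (String filter : commandLine.getOptionValues('f')) { // one entry per -f occurrence
            System.out.println(filter);
        }
    }
}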

View File

@@ -925,11 +925,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
            rightItem = item;
        }
-       // Create the relationship
-       int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
-       int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
-       Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem,
-           foundRelationshipType, leftPlace, rightPlace);
+       // Create the relationship, appending to the end
+       Relationship persistedRelationship = relationshipService.create(
+           c, leftItem, rightItem, foundRelationshipType, -1, -1
+       );
        relationshipService.update(c, persistedRelationship);
    }

View File

@@ -403,10 +403,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
        }
        // Create the relationship
-       int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
-       int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
-       Relationship persistedRelationship = relationshipService.create(
-           c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace);
+       Relationship persistedRelationship =
+           relationshipService.create(c, leftItem, rightItem, foundRelationshipType, -1, -1);
        // relationshipService.update(c, persistedRelationship);
        System.out.println("\tAdded relationship (type: " + relationshipType + ") from " +

View File

@@ -227,7 +227,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
        if (StringUtils.isBlank(profileType)) {
            return false;
        }
-       return profileType.equals(itemService.getEntityType(item));
+       return profileType.equals(itemService.getEntityTypeLabel(item));
    }
    @Override
@@ -251,7 +251,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
        Iterator<Item> items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString());
        while (items.hasNext()) {
            Item item = items.next();
-           String entityType = itemService.getEntityType(item);
+           String entityType = itemService.getEntityTypeLabel(item);
            if (profileType.equals(entityType)) {
                return item;
            }

View File

@@ -0,0 +1,175 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.IndexingService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.SolrUtils;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to update solr items with "predb" status to either:
* - Delete them from solr if they're not present in the database
* - Remove their status if they're present in the database
*/
public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliScriptConfiguration> {
/* Log4j logger */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class);
public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex";
private IndexingService indexingService;
private SolrSearchCore solrSearchCore;
private IndexObjectFactoryFactory indexObjectServiceFactory;
private ConfigurationService configurationService;
private int timeUntilReindex = 0;
private String maxTime;
@Override
public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class);
}
public static void runScheduled() throws Exception {
SolrDatabaseResyncCli script = new SolrDatabaseResyncCli();
script.setup();
script.internalRun();
}
@Override
public void setup() throws ParseException {
indexingService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(IndexingService.class.getName(), IndexingService.class);
solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
.getServicesByType(SolrSearchCore.class).get(0);
indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance();
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
}
@Override
public void internalRun() throws Exception {
logInfoAndOut("Starting Item resync of Solr and Database...");
timeUntilReindex = getTimeUntilReindex();
maxTime = getMaxTime();
Context context = new Context();
try {
context.turnOffAuthorisationSystem();
performStatusUpdate(context);
} finally {
context.restoreAuthSystemState();
context.complete();
}
}
private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
logDebugAndOut("Date range filter used; " + dateRangeFilter);
solrQuery.addFilterQuery(dateRangeFilter);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
if (response != null) {
logInfoAndOut(response.getResults().size() + " items found to process");
for (SolrDocument doc : response.getResults()) {
String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD);
String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID);
logDebugAndOut("Processing item with UUID: " + uuid);
Optional<IndexableObject> indexableObject = Optional.empty();
try {
indexableObject = indexObjectServiceFactory
.getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid);
} catch (SQLException e) {
log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid +
"\" from the database, removing related solr document", e);
}
try {
if (indexableObject.isPresent()) {
logDebugAndOut("Item exists in DB, updating solr document");
updateItem(context, indexableObject.get());
} else {
logDebugAndOut("Item doesn't exist in DB, removing solr document");
removeItem(context, uniqueId);
}
} catch (SQLException | IOException e) {
log.error(e.getMessage(), e);
}
}
}
indexingService.commit();
}
private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException {
Map<String,Object> fieldModifier = new HashMap<>(1);
fieldModifier.put("remove", STATUS_FIELD_PREDB);
indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier);
}
private void removeItem(Context context, String uniqueId) throws IOException, SQLException {
indexingService.unIndexContent(context, uniqueId);
}
private String getMaxTime() {
Calendar cal = Calendar.getInstance();
if (timeUntilReindex > 0) {
cal.add(Calendar.MILLISECOND, -timeUntilReindex);
}
return SolrUtils.getDateFormatter().format(cal.getTime());
}
private int getTimeUntilReindex() {
return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0);
}
private void logInfoAndOut(String message) {
log.info(message);
System.out.println(message);
}
private void logDebugAndOut(String message) {
log.debug(message);
System.out.println(message);
}
}
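
The updateItem(...) call above delegates to DSpace's IndexingService.atomicUpdate. For orientation only, the sketch below shows what a Solr atomic update with a "remove" modifier looks like in plain SolrJ; the core URL, field names and values are placeholders rather than the ones DSpace actually uses, and this is not the DSpace implementation:

import java.util.HashMap;
import java.util.Map;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;

public class SolrAtomicRemoveExample {
    public static void main(String[] args) throws Exception {
        // Placeholder core URL and field names: DSpace resolves its search core via SolrSearchCore.
        try (SolrClient solr = new HttpSolrClient.Builder("http://localhost:8983/solr/search").build()) {
            SolrInputDocument doc = new SolrInputDocument();
            doc.addField("search.uniqueid", "Item-00000000-0000-0000-0000-000000000000");
            // Atomic update: the "remove" modifier deletes one value from a (multi-valued) field.
            Map<String, Object> fieldModifier = new HashMap<>(1);
            fieldModifier.put("remove", "predb");
            doc.addField("database_status", fieldModifier);
            solr.add(doc);
            solr.commit();
        }
    }
}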

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script.
*/
public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration<SolrDatabaseResyncCli> {
private Class<SolrDatabaseResyncCli> dspaceRunnableClass;
@Override
public Class<SolrDatabaseResyncCli> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<SolrDatabaseResyncCli> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
return true;
}
@Override
public Options getOptions() {
if (options == null) {
options = new Options();
}
return options;
}
}

View File

@@ -158,6 +158,11 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
        }
        bundle.addBitstream(bitstream);
+       // If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted
+       // (when removed from the original bundle)
+       if (bitstream.isDeleted()) {
+           bitstream.setDeleted(false);
+       }
        bitstream.getBundles().add(bundle);

View File

@@ -621,8 +621,14 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
        });
        for (MetadataValue metadataValue : metadataValues) {
            //Retrieve & store the place for each metadata value
-           if (StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) &&
-               ((RelationshipMetadataValue) metadataValue).isUseForPlace()) {
+           if (
+               // For virtual MDVs with useForPlace=true,
+               // update both the place of the metadatum and the place of the Relationship.
+               // E.g. for an Author relationship,
+               // the place should be updated using the same principle as dc.contributor.author.
+               StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX)
+                   && ((RelationshipMetadataValue) metadataValue).isUseForPlace()
+           ) {
                int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
                metadataValue.setPlace(mvPlace);
                String authority = metadataValue.getAuthority();
@@ -635,8 +641,16 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
                }
                relationshipService.update(context, relationship);
-           } else if (!StringUtils.startsWith(metadataValue.getAuthority(),
-               Constants.VIRTUAL_AUTHORITY_PREFIX)) {
+           } else if (
+               // Otherwise, just set the place of the metadatum
+               // ...unless the metadatum in question is a relation.* metadatum.
+               // This case is a leftover from when a Relationship is removed and copied to metadata.
+               // If we let its place change the order of any remaining Relationships will be affected.
+               // todo: this makes it so these leftover MDVs can't be reordered later on
+               !StringUtils.equals(
+                   metadataValue.getMetadataField().getMetadataSchema().getName(), "relation"
+               )
+           ) {
                int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
                metadataValue.setPlace(mvPlace);
            }

View File

@@ -49,6 +49,7 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
+import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataSchemaService;
@@ -129,6 +130,9 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
    @Autowired(required = true)
    private RelationshipMetadataService relationshipMetadataService;
+   @Autowired(required = true)
+   private EntityTypeService entityTypeService;
    @Autowired
    private OrcidTokenService orcidTokenService;
@@ -265,6 +269,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
        return itemDAO.findAll(context, true, true);
    }
+   public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
+       return itemDAO.findAllRegularItems(context);
+   };
    @Override
    public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
        return itemDAO.findBySubmitter(context, eperson);
@@ -748,7 +756,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
            + item.getID()));
        // Remove relationships
-       for (Relationship relationship : relationshipService.findByItem(context, item)) {
+       for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) {
            relationshipService.forceDelete(context, relationship, false, false);
        }
@@ -1611,8 +1619,36 @@ prevent the generation of resource policy entry values with null dspace_object a
    }
    @Override
-   public String getEntityType(Item item) {
-       return getMetadataFirstValue(item, new MetadataFieldName("dspace.entity.type"), Item.ANY);
+   public String getEntityTypeLabel(Item item) {
+       List<MetadataValue> mdvs = getMetadata(item, "dspace", "entity", "type", Item.ANY, false);
+       if (mdvs.isEmpty()) {
+           return null;
+       }
+       if (mdvs.size() > 1) {
+           log.warn(
+               "Item with uuid {}, handle {} has {} entity types ({}), expected 1 entity type",
+               item.getID(), item.getHandle(), mdvs.size(),
+               mdvs.stream().map(MetadataValue::getValue).collect(Collectors.toList())
+           );
+       }
+       String entityType = mdvs.get(0).getValue();
+       if (StringUtils.isBlank(entityType)) {
+           return null;
+       }
+       return entityType;
+   }
+   @Override
+   public EntityType getEntityType(Context context, Item item) throws SQLException {
+       String entityTypeString = getEntityTypeLabel(item);
+       if (StringUtils.isBlank(entityTypeString)) {
+           return null;
+       }
+       return entityTypeService.findByEntityType(context, entityTypeString);
    }
    private void removeOrcidSynchronizationStuff(Context context, Item item) throws SQLException, AuthorizeException {
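
A brief sketch of how calling code can use the two accessors introduced above (getEntityTypeLabel for the raw dspace.entity.type value, getEntityType for the resolved EntityType row); the helper class and method are hypothetical:

import java.sql.SQLException;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;

public class EntityTypeLookupExample {
    // Hypothetical helper: prefer the resolved EntityType row, fall back to the raw label.
    public static String describeEntityType(ItemService itemService, Context context, Item item)
        throws SQLException {
        EntityType entityType = itemService.getEntityType(context, item); // resolved EntityType, or null
        if (entityType != null) {
            return entityType.getLabel();
        }
        return itemService.getEntityTypeLabel(item); // raw dspace.entity.type value, or null
    }
}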

View File

@@ -89,6 +89,15 @@ public class Relationship implements ReloadableEntity<Integer> {
@Column(name = "rightward_value") @Column(name = "rightward_value")
private String rightwardValue; private String rightwardValue;
/**
* Whether the left and/or right side of a given relationship are the "latest".
* A side of a relationship is "latest" if the item on that side has either no other versions,
* or the item on that side is the most recent version that is relevant to the given relationship.
* This column affects what version of an item appears on search pages or the relationship listings of other items.
*/
@Column(name = "latest_version_status")
private LatestVersionStatus latestVersionStatus = LatestVersionStatus.BOTH;
/** /**
* Protected constructor, create object using: * Protected constructor, create object using:
* {@link org.dspace.content.service.RelationshipService#create(Context)} } * {@link org.dspace.content.service.RelationshipService#create(Context)} }
@@ -216,6 +225,39 @@ public class Relationship implements ReloadableEntity<Integer> {
        this.rightwardValue = rightwardValue;
    }
+   /**
+    * Getter for {@link #latestVersionStatus}.
+    * @return the latest version status of this relationship.
+    */
+   public LatestVersionStatus getLatestVersionStatus() {
+       return latestVersionStatus;
+   }
+   /**
+    * Setter for {@link #latestVersionStatus}.
+    * @param latestVersionStatus the new latest version status for this relationship.
+    */
+   public void setLatestVersionStatus(LatestVersionStatus latestVersionStatus) {
+       if (this.latestVersionStatus == latestVersionStatus) {
+           return; // no change or cache reset needed
+       }
+       this.latestVersionStatus = latestVersionStatus;
+       // on one item, relation.* fields will change
+       // on the other item, relation.*.latestForDiscovery will change
+       leftItem.setMetadataModified();
+       rightItem.setMetadataModified();
+   }
+   public enum LatestVersionStatus {
+       // NOTE: SQL migration expects BOTH to be the first constant in this enum!
+       BOTH, // both items in this relationship are the "latest"
+       LEFT_ONLY, // the left-hand item of this relationship is the "latest", but the right-hand item is not
+       RIGHT_ONLY // the right-hand item of this relationship is the "latest", but the left-hand item is not
+       // NOTE: one side of any given relationship should ALWAYS be the "latest"
+   }
    /**
     * Standard getter for the ID for this Relationship
     * @return The ID of this relationship
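
For orientation, a minimal sketch of the kind of check this status enables (the otherSideIsLatest(...) call used later in this commit); this is an illustration derived from the comments above and the enum semantics, not the actual RelationshipVersioningUtils implementation:

import org.dspace.content.Relationship.LatestVersionStatus;

public class LatestVersionStatusExample {
    // Illustration only: for the left item's relation.* fields the right side must be "latest",
    // and vice versa.
    static boolean otherSideIsLatest(boolean isLeft, LatestVersionStatus status) {
        if (status == LatestVersionStatus.BOTH) {
            return true;
        }
        return isLeft ? status == LatestVersionStatus.RIGHT_ONLY : status == LatestVersionStatus.LEFT_ONLY;
    }

    public static void main(String[] args) {
        System.out.println(otherSideIsLatest(true, LatestVersionStatus.RIGHT_ONLY)); // true
        System.out.println(otherSideIsLatest(true, LatestVersionStatus.LEFT_ONLY));  // false
    }
}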

View File

@@ -56,7 +56,9 @@ public interface RelationshipMetadataService {
     * This method will retrieve the EntityType String from an item
     * @param item The Item for which the entityType String will be returned
     * @return A String value indicating the entityType
+    * @deprecated use {@link org.dspace.content.service.ItemService#getEntityTypeLabel(Item)} instead.
     */
+   @Deprecated
    public String getEntityTypeStringFromMetadata(Item item);
}

View File

@@ -7,16 +7,24 @@
 */
package org.dspace.content;
+import static org.dspace.content.RelationshipType.Tilted.LEFT;
+import static org.dspace.content.RelationshipType.Tilted.RIGHT;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
+import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
+import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.RelationshipService;
+import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
@@ -33,6 +41,12 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
    @Autowired(required = true)
    protected RelationshipService relationshipService;
+   @Autowired(required = true)
+   protected RelationshipTypeService relationshipTypeService;
+   @Autowired(required = true)
+   protected ItemService itemService;
    @Autowired(required = true)
    protected VirtualMetadataPopulator virtualMetadataPopulator;
@@ -44,12 +58,25 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
        Context context = new Context();
        List<RelationshipMetadataValue> fullMetadataValueList = new LinkedList<>();
        try {
-           String entityType = getEntityTypeStringFromMetadata(item);
-           if (StringUtils.isNotBlank(entityType)) {
+           EntityType entityType = itemService.getEntityType(context, item);
+           if (entityType != null) {
+               // NOTE: The following code will add metadata fields of type relation.*.latestForDiscovery
+               //       (e.g. relation.isAuthorOfPublication.latestForDiscovery).
+               //       These fields contain the UUIDs of the items that have a relationship with current item,
+               //       from the perspective of the other item. In other words, given a relationship with this item,
+               //       the current item should have "latest status" in order for the other item to appear in
+               //       relation.*.latestForDiscovery fields.
+               fullMetadataValueList.addAll(findLatestForDiscoveryMetadataValues(context, item, entityType));
+               // NOTE: The following code will, among other things,
+               //       add metadata fields of type relation.* (e.g. relation.isAuthorOfPublication).
+               //       These fields contain the UUIDs of the items that have a relationship with current item,
+               //       from the perspective of this item. In other words, given a relationship with this item,
+               //       the other item should have "latest status" in order to appear in relation.* fields.
                List<Relationship> relationships = relationshipService.findByItem(context, item, -1, -1, true);
                for (Relationship relationship : relationships) {
                    fullMetadataValueList
-                       .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType,
+                       .addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType.getLabel(),
                            relationship, enableVirtualMetadata));
                }
@@ -60,18 +87,92 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
        return fullMetadataValueList;
    }
-   public String getEntityTypeStringFromMetadata(Item item) {
-       List<MetadataValue> list = item.getMetadata();
-       for (MetadataValue mdv : list) {
-           if (StringUtils.equals(mdv.getMetadataField().getMetadataSchema().getName(), "dspace")
-               && StringUtils.equals(mdv.getMetadataField().getElement(), "entity")
-               && StringUtils.equals(mdv.getMetadataField().getQualifier(), "type")) {
-               return mdv.getValue();
-           }
-       }
-       return null;
-   }
+   /**
+    * Create the list of relation.*.latestForDiscovery virtual metadata values for the given item.
+    * @param context the DSpace context.
+    * @param item the item.
+    * @param itemEntityType the entity type of the item.
+    * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
+    */
+   protected List<RelationshipMetadataValue> findLatestForDiscoveryMetadataValues(
+       Context context, Item item, EntityType itemEntityType
+   ) throws SQLException {
+       final String schema = MetadataSchemaEnum.RELATION.getName();
+       final String qualifier = "latestForDiscovery";
+       List<RelationshipMetadataValue> mdvs = new LinkedList<>();
+       List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, itemEntityType);
+       for (RelationshipType relationshipType : relationshipTypes) {
+           // item is on left side of this relationship type
+           // NOTE: On the left item, we should index the uuids of the right items. If the relationship type is
+           //       "tilted right", it means that we expect a huge amount of right items, so we don't index their uuids
+           //       on the left item as a storage/performance improvement.
+           //       As a consequence, when searching for related items (using discovery)
+           //       on the pages of the right items you won't be able to find the left item.
+           if (relationshipType.getTilted() != RIGHT && relationshipType.getLeftType().equals(itemEntityType)) {
+               String element = relationshipType.getLeftwardType();
+               List<ItemUuidAndRelationshipId> data = relationshipService
+                   .findByLatestItemAndRelationshipType(context, item, relationshipType, true);
+               mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
+           }
+           // item is on right side of this relationship type
+           // NOTE: On the right item, we should index the uuids of the left items. If the relationship type is
+           //       "tilted left", it means that we expect a huge amount of left items, so we don't index their uuids
+           //       on the right item as a storage/performance improvement.
+           //       As a consequence, when searching for related items (using discovery)
+           //       on the pages of the left items you won't be able to find the right item.
+           if (relationshipType.getTilted() != LEFT && relationshipType.getRightType().equals(itemEntityType)) {
+               String element = relationshipType.getRightwardType();
+               List<ItemUuidAndRelationshipId> data = relationshipService
+                   .findByLatestItemAndRelationshipType(context, item, relationshipType, false);
+               mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
+           }
+       }
+       return mdvs;
+   }
+   /**
+    * Turn the given data into a list of relation.*.latestForDiscovery virtual metadata values.
+    * @param context the DSpace context.
+    * @param schema the schema for all metadata values.
+    * @param element the element for all metadata values.
+    * @param qualifier the qualifier for all metadata values.
+    * @param data a POJO containing the item uuid and relationship id.
+    * @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
+    */
+   protected List<RelationshipMetadataValue> constructLatestForDiscoveryMetadataValues(
+       Context context, String schema, String element, String qualifier, List<ItemUuidAndRelationshipId> data
+   ) {
+       String mdf = new MetadataFieldName(schema, element, qualifier).toString();
+       return data.stream()
+           .map(datum -> {
+               RelationshipMetadataValue mdv = constructMetadataValue(context, mdf);
+               if (mdv == null) {
+                   return null;
+               }
+               mdv.setAuthority(Constants.VIRTUAL_AUTHORITY_PREFIX + datum.getRelationshipId());
+               mdv.setValue(datum.getItemUuid().toString());
+               // NOTE: place has no meaning for relation.*.latestForDiscovery metadata fields
+               mdv.setPlace(-1);
+               mdv.setUseForPlace(false);
+               return mdv;
+           })
+           .filter(Objects::nonNull)
+           .collect(Collectors.toUnmodifiableList());
+   }
+   @Override
+   @Deprecated
+   public String getEntityTypeStringFromMetadata(Item item) {
+       return itemService.getEntityTypeLabel(item);
+   }
    @Override
    public List<RelationshipMetadataValue> findRelationshipMetadataValueForItemRelationship(
        Context context, Item item, String entityType, Relationship relationship, boolean enableVirtualMetadata)

View File

@@ -10,9 +10,11 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
-import java.util.Comparator;
+import java.util.HashMap;
import java.util.List;
import java.util.UUID;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -20,15 +22,19 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
+import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.dao.RelationshipDAO;
+import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
+import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
+import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.springframework.beans.factory.annotation.Autowired;
public class RelationshipServiceImpl implements RelationshipService {
@@ -55,6 +61,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Autowired
private RelationshipMetadataService relationshipMetadataService;
+@Autowired
+private RelationshipVersioningUtils relationshipVersioningUtils;
@Autowired
private VirtualMetadataPopulator virtualMetadataPopulator;
@@ -76,9 +86,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override
-public Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType,
-                           int leftPlace, int rightPlace, String leftwardValue, String rightwardValue)
-    throws AuthorizeException, SQLException {
+public Relationship create(
+    Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
+    String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
+) throws AuthorizeException, SQLException {
Relationship relationship = new Relationship();
relationship.setLeftItem(leftItem);
relationship.setRightItem(rightItem);
@@ -87,9 +98,21 @@ public class RelationshipServiceImpl implements RelationshipService {
relationship.setRightPlace(rightPlace);
relationship.setLeftwardValue(leftwardValue);
relationship.setRightwardValue(rightwardValue);
+relationship.setLatestVersionStatus(latestVersionStatus);
return create(c, relationship);
}
+@Override
+public Relationship create(
+    Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
+    String leftwardValue, String rightwardValue
+) throws AuthorizeException, SQLException {
+    return create(
+        c, leftItem, rightItem, relationshipType, leftPlace, rightPlace, leftwardValue, rightwardValue,
+        LatestVersionStatus.BOTH
+    );
+}
@Override
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException {
if (isRelationshipValidToCreate(context, relationship)) {
@@ -98,7 +121,7 @@ public class RelationshipServiceImpl implements RelationshipService {
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
Relationship relationshipToReturn = relationshipDAO.create(context, relationship);
-updatePlaceInRelationship(context, relationshipToReturn);
+updatePlaceInRelationship(context, relationshipToReturn, null, null, true, true);
update(context, relationshipToReturn);
updateItemsInRelationship(context, relationship);
return relationshipToReturn;
@@ -113,71 +136,388 @@ public class RelationshipServiceImpl implements RelationshipService {
}
@Override
-public void updatePlaceInRelationship(Context context, Relationship relationship)
-    throws SQLException, AuthorizeException {
+public Relationship move(
+    Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace
+) throws SQLException, AuthorizeException {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
// Don't do anything if neither the leftPlace nor rightPlace was updated
if (newLeftPlace != null || newRightPlace != null) {
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, newLeftPlace, newRightPlace, false, false);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
} else {
throw new AuthorizeException(
"You do not have write rights on this relationship's items");
}
}
@Override
public Relationship move(
Context context, Relationship relationship, Item newLeftItem, Item newRightItem
) throws SQLException, AuthorizeException {
// If the new Item is the same as the current Item, don't move
newLeftItem = newLeftItem != relationship.getLeftItem() ? newLeftItem : null;
newRightItem = newRightItem != relationship.getRightItem() ? newRightItem : null;
// Don't do anything if neither the leftItem nor rightItem was updated
if (newLeftItem != null || newRightItem != null) {
// First move the Relationship to the back within the current Item's lists
// This ensures that we won't have any gaps once we move the Relationship to a different Item
move(
context, relationship,
newLeftItem != null ? -1 : null,
newRightItem != null ? -1 : null
);
boolean insertLeft = false;
boolean insertRight = false;
// If Item has been changed, mark the previous Item as modified to make sure we discard the old relation.*
// metadata on the next update.
// Set the Relationship's Items to the new ones, appending to the end
if (newLeftItem != null) {
relationship.getLeftItem().setMetadataModified();
relationship.setLeftItem(newLeftItem);
relationship.setLeftPlace(-1);
insertLeft = true;
}
if (newRightItem != null) {
relationship.getRightItem().setMetadataModified();
relationship.setRightItem(newRightItem);
relationship.setRightPlace(-1);
insertRight = true;
}
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, null, null, insertLeft, insertRight);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
}
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship.
*
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated.
* @param newLeftPlace If the Relationship in question is to be moved, the leftPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param newRightPlace If the Relationship in question is to be moved, the rightPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param insertLeft Whether the Relationship in question should be inserted into the left Item.
* Should be set to true when creating or moving to a different Item.
* @param insertRight Whether the Relationship in question should be inserted into the right Item.
* Should be set to true when creating or moving to a different Item.
* @throws SQLException If something goes wrong
* @throws AuthorizeException
* If the user is not authorized to update the Relationship or its Items
*/
private void updatePlaceInRelationship(
Context context, Relationship relationship,
Integer newLeftPlace, Integer newRightPlace, boolean insertLeft, boolean insertRight
) throws SQLException, AuthorizeException {
Item leftItem = relationship.getLeftItem();
-// Max value is used to ensure that these will get added to the back of the list and thus receive the highest
-// (last) place as it's set to a -1 for creation
-if (relationship.getLeftPlace() == -1) {
-    relationship.setLeftPlace(Integer.MAX_VALUE);
-}
Item rightItem = relationship.getRightItem();
-if (relationship.getRightPlace() == -1) {
-    relationship.setRightPlace(Integer.MAX_VALUE);
-}
-List<Relationship> leftRelationships = findByItemAndRelationshipType(context,
-    leftItem,
-    relationship.getRelationshipType(), true);
-List<Relationship> rightRelationships = findByItemAndRelationshipType(context,
-    rightItem,
-    relationship.getRelationshipType(),
-    false);
+// These list also include the non-latest. This is relevant to determine whether it's deleted.
+// This can also imply there may be overlapping places, and/or the given relationship will overlap
+// But the shift will allow this, and only happen when needed based on the latest status
+List<Relationship> leftRelationships = findByItemAndRelationshipType(
+    context, leftItem, relationship.getRelationshipType(), true, -1, -1, false
+);
+List<Relationship> rightRelationships = findByItemAndRelationshipType(
+    context, rightItem, relationship.getRelationshipType(), false, -1, -1, false
+);
// These relationships are only deleted from the temporary lists in case they're present in them so that we can
// properly perform our place calculation later down the line in this method.
-if (leftRelationships.contains(relationship)) {
-    leftRelationships.remove(relationship);
-}
-if (rightRelationships.contains(relationship)) {
-    rightRelationships.remove(relationship);
-}
+boolean deletedFromLeft = !leftRelationships.contains(relationship);
+boolean deletedFromRight = !rightRelationships.contains(relationship);
+leftRelationships.remove(relationship);
+rightRelationships.remove(relationship);
+List<MetadataValue> leftMetadata = getSiblingMetadata(leftItem, relationship, true);
+List<MetadataValue> rightMetadata = getSiblingMetadata(rightItem, relationship, false);
+// For new relationships added to the end, this will be -1.
+// For new relationships added at a specific position, this will contain that position.
+// For existing relationships, this will contain the place before it was moved.
+// For deleted relationships, this will contain the place before it was deleted.
+int oldLeftPlace = relationship.getLeftPlace();
+int oldRightPlace = relationship.getRightPlace();
+boolean movedUpLeft = resolveRelationshipPlace(
+    relationship, true, leftRelationships, leftMetadata, oldLeftPlace, newLeftPlace
+);
+boolean movedUpRight = resolveRelationshipPlace(
+    relationship, false, rightRelationships, rightMetadata, oldRightPlace, newRightPlace
+);
context.turnOffAuthorisationSystem();
-//If useForPlace for the leftwardType is false for the relationshipType,
-// we need to sort the relationships here based on leftplace.
-if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), true)) {
-    if (!leftRelationships.isEmpty()) {
-        leftRelationships.sort(Comparator.comparingInt(Relationship::getLeftPlace));
-        for (int i = 0; i < leftRelationships.size(); i++) {
-            leftRelationships.get(i).setLeftPlace(i);
-        }
-        relationship.setLeftPlace(leftRelationships.size());
-    } else {
-        relationship.setLeftPlace(0);
-    }
-} else {
-    updateItem(context, leftItem);
-}
-//If useForPlace for the rightwardType is false for the relationshipType,
-// we need to sort the relationships here based on the rightplace.
-if (!virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), false)) {
-    if (!rightRelationships.isEmpty()) {
-        rightRelationships.sort(Comparator.comparingInt(Relationship::getRightPlace));
-        for (int i = 0; i < rightRelationships.size(); i++) {
-            rightRelationships.get(i).setRightPlace(i);
-        }
-        relationship.setRightPlace(rightRelationships.size());
-    } else {
-        relationship.setRightPlace(0);
-    }
-} else {
-    updateItem(context, rightItem);
-}
+//only shift if the place is relevant for the latest relationships
+if (relationshipVersioningUtils.otherSideIsLatest(true, relationship.getLatestVersionStatus())) {
+    shiftSiblings(
+        relationship, true, oldLeftPlace, movedUpLeft, insertLeft, deletedFromLeft,
+        leftRelationships, leftMetadata
+    );
+}
+if (relationshipVersioningUtils.otherSideIsLatest(false, relationship.getLatestVersionStatus())) {
+    shiftSiblings(
+        relationship, false, oldRightPlace, movedUpRight, insertRight, deletedFromRight,
+        rightRelationships, rightMetadata
+    );
+}
+updateItem(context, leftItem);
+updateItem(context, rightItem);
context.restoreAuthSystemState();
}
/**
* Return the MDVs in the Item's MDF corresponding to the given Relationship.
* Return an empty list if the Relationship isn't mapped to any MDF
* or if the mapping is configured with useForPlace=false.
*
* This returns actual metadata (not virtual) which in the same metadata field as the useForPlace.
* For a publication with 2 author relationships and 3 plain text dc.contributor.author values,
* it would return the 3 plain text dc.contributor.author values.
* For a person related to publications, it would return an empty list.
*/
private List<MetadataValue> getSiblingMetadata(
Item item, Relationship relationship, boolean isLeft
) {
List<MetadataValue> metadata = new ArrayList<>();
if (virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), isLeft)) {
HashMap<String, VirtualMetadataConfiguration> mapping;
if (isLeft) {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getLeftwardType());
} else {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getRightwardType());
}
if (mapping != null) {
for (String mdf : mapping.keySet()) {
metadata.addAll(
// Make sure we're only looking at database MDVs; if the relationship currently overlaps
// one of these, its virtual MDV will overwrite the database MDV in itemService.getMetadata()
// The relationship pass should be sufficient to move any sibling virtual MDVs.
item.getMetadata()
.stream()
.filter(mdv -> mdv.getMetadataField().toString().equals(mdf.replace(".", "_")))
.collect(Collectors.toList())
);
}
}
}
return metadata;
}
/**
* Set the left/right place of a Relationship
* - To a new place in case it's being moved
* - Resolve -1 to the actual last place based on the places of its sibling Relationships and/or MDVs
* and determine if it has been moved up in the list.
*
* Examples:
* - Insert a Relationship at place 3
* newPlace starts out as null and is not updated. Return movedUp=false
* - Insert a Relationship at place -1
* newPlace starts out as null and is resolved to e.g. 6. Update the Relationship and return movedUp=false
* - Move a Relationship from place 4 to 2
* Update the Relationship and return movedUp=false.
* - Move a Relationship from place 2 to -1
* newPlace starts out as -1 and is resolved to e.g. 5. Update the relationship and return movedUp=true.
* - Remove a relationship from place 1
* Return movedUp=false
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place a Relationship has been inserted.
* @param newPlace The new place for this Relationship. Will be null on insert/delete.
* @return true if the Relationship was moved and newPlace > oldPlace
*/
private boolean resolveRelationshipPlace(
Relationship relationship, boolean isLeft,
List<Relationship> relationships, List<MetadataValue> metadata,
int oldPlace, Integer newPlace
) {
boolean movedUp = false;
if (newPlace != null) {
// We're moving an existing Relationship...
if (newPlace == -1) {
// ...to the end of the list
int nextPlace = getNextPlace(relationships, metadata, isLeft);
if (nextPlace == oldPlace) {
// If this Relationship is already at the end, do nothing.
newPlace = oldPlace;
} else {
// Subtract 1 from the next place since we're moving, not inserting and
// the total number of Relationships stays the same.
newPlace = nextPlace - 1;
}
}
if (newPlace > oldPlace) {
// ...up the list. We have to keep track of this in order to shift correctly later on
movedUp = true;
}
} else if (oldPlace == -1) {
// We're _not_ moving an existing Relationship; the desired place is already set on the Relationship object
// (and passed in as oldPlace). We only need to resolve it to the end of the list if it's -1, otherwise we keep it as is.
newPlace = getNextPlace(relationships, metadata, isLeft);
}
if (newPlace != null) {
setPlace(relationship, isLeft, newPlace);
}
return movedUp;
}
/**
* Return the index of the next place in a list of Relationships and Metadata.
* By not relying on the size of both lists we can support one-to-many virtual MDV mappings.
* @param isLeft whether to take the left or right place of each Relationship
*/
private int getNextPlace(List<Relationship> relationships, List<MetadataValue> metadata, boolean isLeft) {
return Stream.concat(
metadata.stream().map(MetadataValue::getPlace),
relationships.stream().map(r -> getPlace(r, isLeft))
).max(Integer::compare)
.map(integer -> integer + 1)
.orElse(0);
}
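// Hedged illustration (hypothetical helper and numbers, not part of this changeset): with sibling MDVs at
// places 0 and 2 and a sibling Relationship at place 1, the next free place is 3; with no siblings at all
// the combined stream is empty and the result falls back to 0.
private static int nextPlaceExample() {
List<Integer> mdvPlaces = List.of(0, 2); // places of plain metadata values
List<Integer> relationshipPlaces = List.of(1); // places of sibling relationships
return Stream.concat(mdvPlaces.stream(), relationshipPlaces.stream())
.max(Integer::compare)
.map(place -> place + 1)
.orElse(0); // 3
}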
/**
* Adjust the left/right place of sibling Relationships and MDVs
*
* Examples: with sibling Relationships R,S,T and metadata a,b,c
* - Insert T at place 1 aRbSc -> a T RbSc
* Shift all siblings with place >= 1 one place to the right
* - Delete R from place 2 aT R bSc -> aTbSc
* Shift all siblings with place > 2 one place to the left
* - Move S from place 3 to place 2 (movedUp=false) aTb S c -> aT S bc
* Shift all siblings with 2 < place <= 3 one place to the right
* - Move T from place 1 to place 3 (movedUp=true) a T Sbc -> aSb T c
* Shift all siblings with 1 < place <= 3 one place to the left
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place at which a Relationship has been inserted.
* @param movedUp if this Relationship has been moved up the list, e.g. from place 2 to place 4
* @param inserted whether this Relationship has just been inserted
* @param deleted whether this Relationship has been deleted
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
*/
private void shiftSiblings(
Relationship relationship, boolean isLeft, int oldPlace, boolean movedUp, boolean inserted, boolean deleted,
List<Relationship> relationships, List<MetadataValue> metadata
) {
int newPlace = getPlace(relationship, isLeft);
for (Relationship sibling : relationships) {
// NOTE: If and only if the other side of the relationship has "latest" status, the relationship will appear
// as a metadata value on the item at the current side (indicated by isLeft) of the relationship.
//
// Example: volume <----> issue (LEFT_ONLY)
// => LEFT_ONLY means that the volume has "latest" status, but the issue does NOT have "latest" status
// => the volume will appear in the metadata of the issue,
// but the issue will NOT appear in the metadata of the volume
//
// This means that the other side of the relationship has to have "latest" status, otherwise this
// relationship is NOT relevant for place calculation.
if (relationshipVersioningUtils.otherSideIsLatest(isLeft, sibling.getLatestVersionStatus())) {
int siblingPlace = getPlace(sibling, isLeft);
if (
(deleted && siblingPlace > newPlace)
// If the relationship was deleted, all relationships after it should shift left
// We must make the distinction between deletes and moves because for inserts oldPlace == newPlace
|| (movedUp && siblingPlace <= newPlace && siblingPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all relationships
// with place > 2 (the old place) and <= to 5 should shift left
) {
setPlace(sibling, isLeft, siblingPlace - 1);
} else if (
(inserted && siblingPlace >= newPlace)
// If the relationship was inserted, all relationships starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && siblingPlace >= newPlace && siblingPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all relationships
// with place >= 2 and < 5 (the old place) should shift right
) {
setPlace(sibling, isLeft, siblingPlace + 1);
}
}
}
for (MetadataValue mdv : metadata) {
// NOTE: Plain text metadata values should ALWAYS be included in the place calculation,
// because they are by definition only visible/relevant to the side of the relationship
// that we are currently processing.
int mdvPlace = mdv.getPlace();
if (
(deleted && mdvPlace > newPlace)
// If the relationship was deleted, all metadata after it should shift left
// We must make the distinction between deletes and moves because for inserts oldPlace == newPlace
// If the relationship was copied to metadata on deletion:
// - the plain text can be after the relationship (in which case it's moved forward again)
// - or before the relationship (in which case it remains in place)
|| (movedUp && mdvPlace <= newPlace && mdvPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all metadata
// with place > 2 (the old place) and <= to 5 should shift left
) {
mdv.setPlace(mdvPlace - 1);
} else if (
(inserted && mdvPlace >= newPlace)
// If the relationship was inserted, all metadata starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && mdvPlace >= newPlace && mdvPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all metadata
// with place >= 2 and < 5 (the old place) should shift right
) {
mdv.setPlace(mdvPlace + 1);
}
}
}
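// Hedged sketch (hypothetical helper, not part of this changeset) of the "moved up" rule applied above to
// both Relationships and MDVs: siblings strictly after the old place and up to and including the new place
// shift one step to the left; everything else keeps its place.
private static int shiftedPlaceForMoveUp(int siblingPlace, int oldPlace, int newPlace) {
return (siblingPlace > oldPlace && siblingPlace <= newPlace) ? siblingPlace - 1 : siblingPlace;
}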
private int getPlace(Relationship relationship, boolean isLeft) {
if (isLeft) {
return relationship.getLeftPlace();
} else {
return relationship.getRightPlace();
}
}
private void setPlace(Relationship relationship, boolean isLeft, int place) {
if (isLeft) {
relationship.setLeftPlace(place);
} else {
relationship.setRightPlace(place);
}
} }
@Override @Override
@@ -187,16 +527,6 @@ public class RelationshipServiceImpl implements RelationshipService {
itemService.update(context, relatedItem); itemService.update(context, relatedItem);
} }
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextLeftPlaceByLeftItem(context, item);
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextRightPlaceByRightItem(context, item);
}
private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException { private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException {
RelationshipType relationshipType = relationship.getRelationshipType(); RelationshipType relationshipType = relationship.getRelationshipType();
@@ -212,15 +542,19 @@ public class RelationshipServiceImpl implements RelationshipService {
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
return false; return false;
} }
if (!verifyMaxCardinality(context, relationship.getLeftItem(), if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.LEFT_ONLY)
&& !verifyMaxCardinality(context, relationship.getLeftItem(),
relationshipType.getLeftMaxCardinality(), relationshipType, true)) { relationshipType.getLeftMaxCardinality(), relationshipType, true)) {
//If RIGHT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the left item has more" + log.warn("The relationship has been deemed invalid since the left item has more" +
" relationships than the left max cardinality allows after we'd store this relationship"); " relationships than the left max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
return false; return false;
} }
if (!verifyMaxCardinality(context, relationship.getRightItem(), if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.RIGHT_ONLY)
&& !verifyMaxCardinality(context, relationship.getRightItem(),
relationshipType.getRightMaxCardinality(), relationshipType, false)) { relationshipType.getRightMaxCardinality(), relationshipType, false)) {
//If LEFT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the right item has more" + log.warn("The relationship has been deemed invalid since the right item has more" +
" relationships than the right max cardinality allows after we'd store this relationship"); " relationships than the right max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType); logRelationshipTypeDetailsForError(relationshipType);
@@ -279,10 +613,18 @@ public class RelationshipServiceImpl implements RelationshipService {
} }
@Override @Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, public List<Relationship> findByItem(
boolean excludeTilted) throws SQLException { Context context, Item item, Integer limit, Integer offset, boolean excludeTilted
) throws SQLException {
return findByItem(context, item, limit, offset, excludeTilted, true);
}
List<Relationship> list = relationshipDAO.findByItem(context, item, limit, offset, excludeTilted); @Override
public List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
List<Relationship> list =
relationshipDAO.findByItem(context, item, limit, offset, excludeTilted, excludeNonLatest);
list.sort((o1, o2) -> { list.sort((o1, o2) -> {
int relationshipType = o1.getRelationshipType().getLeftwardType() int relationshipType = o1.getRelationshipType().getLeftwardType()
@@ -377,7 +719,7 @@ public class RelationshipServiceImpl implements RelationshipService {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) || if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) { authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
relationshipDAO.delete(context, relationship); relationshipDAO.delete(context, relationship);
updatePlaceInRelationship(context, relationship); updatePlaceInRelationship(context, relationship, null, null, false, false);
updateItemsInRelationship(context, relationship); updateItemsInRelationship(context, relationship);
} else { } else {
throw new AuthorizeException( throw new AuthorizeException(
@@ -450,7 +792,7 @@ public class RelationshipServiceImpl implements RelationshipService {
+ item.getID() + " due to " + currentDepth + " depth"); + item.getID() + " due to " + currentDepth + " depth");
return; return;
} }
String entityTypeStringFromMetadata = relationshipMetadataService.getEntityTypeStringFromMetadata(item); String entityTypeStringFromMetadata = itemService.getEntityTypeLabel(item);
EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata); EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata);
// Get all types of relations for the current item // Get all types of relations for the current item
List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType); List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType);
@@ -510,6 +852,9 @@ public class RelationshipServiceImpl implements RelationshipService {
/** /**
* Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata. * Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata.
* The resulting MDVs are added in front of or behind the Relationship's virtual MDVs.
* The Relationship's virtual MDVs may be shifted right, and all subsequent metadata will be shifted right.
* So this method ensures the places are still valid.
* *
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param relationship The relationship containing the left and right items * @param relationship The relationship containing the left and right items
@@ -520,13 +865,20 @@ public class RelationshipServiceImpl implements RelationshipService {
boolean copyToRightItem) boolean copyToRightItem)
throws SQLException, AuthorizeException { throws SQLException, AuthorizeException {
if (copyToLeftItem) { if (copyToLeftItem) {
String entityTypeString = relationshipMetadataService String entityTypeString = itemService.getEntityTypeLabel(relationship.getLeftItem());
.getEntityTypeStringFromMetadata(relationship.getLeftItem());
List<RelationshipMetadataValue> relationshipMetadataValues = List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getLeftItem(), entityTypeString, relationship, true); relationship.getLeftItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getLeftItem(), // This adds the plain text metadata values on the same spot as the virtual values.
// This will be overruled in org.dspace.content.DSpaceObjectServiceImpl.update
// in the line below but it's not important whether the plain text or virtual values end up on top.
// The virtual values will eventually be deleted, and the others shifted
// This is required because addAndShiftRightMetadata has issues on metadata fields containing
// relationship values which are not useForPlace, while the relationship type has useForPlace
// E.g. when using addAndShiftRightMetadata on relation.isAuthorOfPublication, it will break the order
// from dc.contributor.author
itemService.addMetadata(context, relationship.getLeftItem(),
relationshipMetadataValue.getMetadataField(). relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(), getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(), relationshipMetadataValue.getMetadataField().getElement(),
@@ -535,16 +887,16 @@ public class RelationshipServiceImpl implements RelationshipService {
relationshipMetadataValue.getValue(), null, -1, relationshipMetadataValue.getValue(), null, -1,
relationshipMetadataValue.getPlace()); relationshipMetadataValue.getPlace());
} }
//This will ensure the new values no longer overlap, but won't break the order
itemService.update(context, relationship.getLeftItem()); itemService.update(context, relationship.getLeftItem());
} }
if (copyToRightItem) { if (copyToRightItem) {
String entityTypeString = relationshipMetadataService String entityTypeString = itemService.getEntityTypeLabel(relationship.getRightItem());
.getEntityTypeStringFromMetadata(relationship.getRightItem());
List<RelationshipMetadataValue> relationshipMetadataValues = List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context, relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getRightItem(), entityTypeString, relationship, true); relationship.getRightItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) { for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getRightItem(), itemService.addMetadata(context, relationship.getRightItem(),
relationshipMetadataValue.getMetadataField(). relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(), getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(), relationshipMetadataValue.getMetadataField().getElement(),
@@ -638,22 +990,46 @@ public class RelationshipServiceImpl implements RelationshipService {
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType) RelationshipType relationshipType)
throws SQLException { throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, -1, -1); return findByItemAndRelationshipType(context, item, relationshipType, -1, -1, true);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, int limit, int offset) RelationshipType relationshipType, int limit, int offset)
throws SQLException { throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, limit, offset); return findByItemAndRelationshipType(context, item, relationshipType, limit, offset, true);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, boolean isLeft, Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
int limit, int offset) ) throws SQLException {
throws SQLException { return relationshipDAO
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset); .findByItemAndRelationshipType(context, item, relationshipType, limit, offset, excludeNonLatest);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset
) throws SQLException {
return findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, true);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, excludeNonLatest);
}
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
return relationshipDAO
.findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft);
} }
@Override @Override
@@ -690,7 +1066,14 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override @Override
public int countByItem(Context context, Item item) throws SQLException { public int countByItem(Context context, Item item) throws SQLException {
return relationshipDAO.countByItem(context, item); return countByItem(context, item, false, true);
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO.countByItem(context, item, excludeTilted, excludeNonLatest);
} }
@Override @Override
@@ -699,9 +1082,18 @@ public class RelationshipServiceImpl implements RelationshipService {
} }
@Override @Override
public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, public int countByItemAndRelationshipType(
boolean isLeft) throws SQLException { Context context, Item item, RelationshipType relationshipType, boolean isLeft
return relationshipDAO.countByItemAndRelationshipType(context, item, relationshipType, isLeft); ) throws SQLException {
return countByItemAndRelationshipType(context, item, relationshipType, isLeft, true);
}
@Override
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.countByItemAndRelationshipType(context, item, relationshipType, isLeft, excludeNonLatest);
} }
@Override @Override

View File

@@ -128,20 +128,24 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Optional<MetadataValue> colEntityType = getDSpaceEntityType(collection); Optional<MetadataValue> colEntityType = getDSpaceEntityType(collection);
Optional<MetadataValue> templateItemEntityType = getDSpaceEntityType(templateItem); Optional<MetadataValue> templateItemEntityType = getDSpaceEntityType(templateItem);
if (colEntityType.isPresent() && templateItemEntityType.isPresent() && if (template && colEntityType.isPresent() && templateItemEntityType.isPresent() &&
!StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) { !StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) {
throw new IllegalStateException("The template item has entity type : (" + throw new IllegalStateException("The template item has entity type : (" +
templateItemEntityType.get().getValue() + ") different than collection entity type : " + templateItemEntityType.get().getValue() + ") different than collection entity type : " +
colEntityType.get().getValue()); colEntityType.get().getValue());
} }
if (colEntityType.isPresent() && templateItemEntityType.isEmpty()) { if (template && colEntityType.isPresent() && templateItemEntityType.isEmpty()) {
MetadataValue original = colEntityType.get(); MetadataValue original = colEntityType.get();
MetadataField metadataField = original.getMetadataField(); MetadataField metadataField = original.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema(); MetadataSchema metadataSchema = metadataField.getMetadataSchema();
// NOTE: dspace.entity.type = <blank> does not make sense
// the collection entity type is by default blank when a collection is first created
if (StringUtils.isNotBlank(original.getValue())) {
itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(), itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), original.getLanguage(), original.getValue()); metadataField.getQualifier(), original.getLanguage(), original.getValue());
} }
}
if (template && (templateItem != null)) { if (template && (templateItem != null)) {
List<MetadataValue> md = itemService.getMetadata(templateItem, Item.ANY, Item.ANY, Item.ANY, Item.ANY); List<MetadataValue> md = itemService.getMetadata(templateItem, Item.ANY, Item.ANY, Item.ANY, Item.ANY);

View File

@@ -32,8 +32,22 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException; public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException;
@Deprecated
public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException; public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException;
/**
* Find all items that are:
* - NOT in the workspace
* - NOT in the workflow
* - NOT a template item for e.g. a collection
*
* This implies that the result also contains older versions of items and withdrawn items.
* @param context the DSpace context.
* @return iterator over all regular items.
* @throws SQLException if database error.
*/
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/** /**
* Find all Items modified since a Date. * Find all Items modified since a Date.
* *

View File

@@ -14,6 +14,7 @@ import java.util.UUID;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Relationship; import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.GenericDAO; import org.dspace.core.GenericDAO;
@@ -32,11 +33,15 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* @param item The item that should be either a leftItem or a rightItem of all * @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list * the Relationship objects in the returned list
* @param excludeTilted If true, excludes tilted relationships * @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that contain either a left or a * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item * right item that is equal to the given item
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException; List<Relationship> findByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/** /**
* This method returns a list of Relationship objects that have the given Item object * This method returns a list of Relationship objects that have the given Item object
@@ -47,34 +52,15 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships * @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that contain either a left or a * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item * right item that is equal to the given item
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted) List<Relationship> findByItem(
throws SQLException; Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the next leftplace integer to use for a relationship with this item as the leftItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on leftItem
* @return The next integer to be used for the leftplace of a relationship with the given item
* as a left item
* @throws SQLException If something goes wrong
*/
int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException;
/**
* This method returns the next rightplace integer to use for a relationship with this item as the rightItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on rightItem
* @return The next integer to be used for the rightplace of a relationship with the given item
* as a right item
* @throws SQLException If something goes wrong
*/
int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given RelationshipType object. * This method returns a list of Relationship objects for the given RelationshipType object.
@@ -108,34 +94,69 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* It will construct a list of all Relationship objects that have the given RelationshipType object * It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property * as the relationshipType property
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on * @param relationshipType The RelationshipType object to be checked on
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param item item to filter by * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the * @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property * relationshipType property
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, List<Relationship> findByItemAndRelationshipType(
Integer limit, Integer offset) throws SQLException; Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given RelationshipType object. * This method returns a list of Relationship objects for the given RelationshipType object.
* It will construct a list of all Relationship objects that have the given RelationshipType object * It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property * as the relationshipType property
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on * @param relationshipType The RelationshipType object to be checked on
* @param isLeft Is item left or right
* @param limit paging limit * @param limit paging limit
* @param offset paging offset * @param offset paging offset
* @param item item to filter by * @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* @param isLeft Is item left or right * that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the * @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property * relationshipType property
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, List<Relationship> findByItemAndRelationshipType(
boolean isLeft, Integer limit, Integer offset) Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
throws SQLException; boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
* of the other item. In other words, given a relationship with the given item, the given item should have
* "latest status" in order for the other item uuid to be returned.
*
* This method differs from the "excludeNonLatest" property in other methods,
* because in this method the current item should have "latest status" to return the other item,
* whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
*
* This method is used to index items in solr; when searching for related items of one of the returned uuids,
* the given item should appear as a search result.
*
* NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
* the items on both sides, which is unnecessary.
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
* @param context the DSpace context.
* @param latestItem the target item; only relationships where this item has "latest status" should be considered.
* @param relationshipType the relationship type for which relationships should be selected.
* @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
* This is redundant in most cases, but necessary because relationship types may have
* the same entity type on both sides.
* @return a list containing pairs of relationship ids and item uuids.
* @throws SQLException if something goes wrong.
*/
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
/** /**
* This method returns a list of Relationship objects for the given typeName * This method returns a list of Relationship objects for the given typeName
@@ -186,11 +207,14 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* @param context The relevant DSpace context * @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all * @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list * the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The list of Relationship objects that contain either a left or a * @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item * right item that is equal to the given item
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
int countByItem(Context context, Item item) throws SQLException; int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean * Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean
@@ -199,12 +223,15 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* @param context context * @param context context
* @param relationshipType relationship type to filter by * @param relationshipType relationship type to filter by
* @param item item to filter by * @param item item to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not * @param isLeft indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return total count * @return total count
* @throws SQLException if database error * @throws SQLException if database error
*/ */
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft) int countByItemAndRelationshipType(
throws SQLException; Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
/** /**
* Count total number of relationships (rows in relationship table) given a typeName * Count total number of relationships (rows in relationship table) given a typeName

View File

@@ -79,6 +79,20 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query); return iterate(query);
} }
@Override
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
// NOTE: This query includes archived items, withdrawn items and older versions of items.
// It does not include workspace, workflow or template items.
Query query = createQuery(
context,
"SELECT i FROM Item as i " +
"LEFT JOIN Version as v ON i = v.item " +
"WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " +
"ORDER BY i.id"
);
return iterate(query);
}
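// Hedged usage sketch (hypothetical helper, not part of this changeset): the iterator covers archived items,
// withdrawn items and older versions, but no workspace, workflow or template items, so it is suitable for
// e.g. a full pass over every "real" item in the repository.
private void forEachRegularItem(Context context, java.util.function.Consumer<Item> consumer) throws SQLException {
Iterator<Item> items = findAllRegularItems(context);
while (items.hasNext()) {
consumer.accept(items.next());
}
}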
@Override @Override
public Iterator<Item> findAll(Context context, boolean archived, public Iterator<Item> findAll(Context context, boolean archived,
boolean withdrawn, boolean discoverable, Date lastModified) boolean withdrawn, boolean discoverable, Date lastModified)

View File

@@ -11,17 +11,22 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors;
import javax.persistence.Query; import javax.persistence.Query;
import javax.persistence.Tuple;
import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root; import javax.persistence.criteria.Root;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Item_;
import org.dspace.content.Relationship; import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType; import org.dspace.content.RelationshipType;
import org.dspace.content.RelationshipType_; import org.dspace.content.RelationshipType_;
import org.dspace.content.Relationship_; import org.dspace.content.Relationship_;
import org.dspace.content.dao.RelationshipDAO; import org.dspace.content.dao.RelationshipDAO;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.RelationshipTypeService; import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.AbstractHibernateDAO;
@@ -30,95 +35,152 @@ import org.dspace.core.Context;
public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> implements RelationshipDAO { public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> implements RelationshipDAO {
@Override @Override
public List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException { public List<Relationship> findByItem(
return findByItem(context, item, -1, -1, excludeTilted); Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return findByItem(context, item, -1, -1, excludeTilted, excludeNonLatest);
} }
@Override @Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, public List<Relationship> findByItem(
boolean excludeTilted) throws SQLException { Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery<Relationship> criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
if (excludeTilted) {
// If this item is the left item, criteriaQuery.where(
// return relationships for types which are not tilted right (tilted is either left nor null)
// If this item is the right item,
// return relationships for types which are not tilted left (tilted is either right nor null)
criteriaQuery
.where(criteriaBuilder.or(
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.or( criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType) getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
.get(RelationshipType_.tilted)), getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
criteriaBuilder.notEqual(relationshipRoot )
.get(Relationship_.relationshipType) );
.get(RelationshipType_.tilted), RelationshipType.Tilted.RIGHT))),
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item),
criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted)),
criteriaBuilder.notEqual(relationshipRoot
.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted), RelationshipType.Tilted.LEFT)))));
} else {
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)));
}
return list(context, criteriaQuery, false, Relationship.class, limit, offset); return list(context, criteriaQuery, false, Relationship.class, limit, offset);
} }
@Override /**
public int countByItem(Context context, Item item) * Get the predicate for a criteria query that selects relationships by their left item.
throws SQLException { * @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getLeftItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)
);
if (excludeTilted) {
// if this item is the left item,
// return relationships for types which are NOT tilted right (tilted is either left or null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.RIGHT
)
)
);
}
if (excludeNonLatest) {
// if this item is the left item,
// return relationships for which the right item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
/**
* Get the predicate for a criteria query that selects relationships by their right item.
* @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getRightItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)
);
if (excludeTilted) {
// if this item is the right item,
// return relationships for types which are NOT tilted left (tilted is either right or null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.LEFT
)
)
);
}
if (excludeNonLatest) {
// if this item is the right item,
// return relationships for which the left item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); criteriaBuilder.or(
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
)
);
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot); return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
} }
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getLeftPlace() - o1.getLeftPlace());
if (!list.isEmpty()) {
return list.get(0).getLeftPlace() + 1;
} else {
return 0;
}
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getRightPlace() - o1.getRightPlace());
if (!list.isEmpty()) {
return list.get(0).getRightPlace() + 1;
} else {
return 0;
}
}
@Override @Override
public List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) public List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType)
throws SQLException { throws SQLException {
@@ -140,49 +202,132 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, Integer limit, Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
Integer offset) boolean excludeNonLatest
throws SQLException { ) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaQuery.where(
relationshipType), criteriaBuilder.or criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item), criteriaBuilder.or(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item))); getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
)
);
return list(context, criteriaQuery, true, Relationship.class, limit, offset); return list(context, criteriaQuery, true, Relationship.class, limit, offset);
} }
@Override @Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item, public List<Relationship> findByItemAndRelationshipType(
RelationshipType relationshipType, boolean isLeft, Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
Integer limit, Integer offset) boolean excludeNonLatest
throws SQLException { ) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class); CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class); Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot); criteriaQuery.select(relationshipRoot);
if (isLeft) { if (isLeft) {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)); );
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace))); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace)));
} else { } else {
criteriaQuery criteriaQuery.where(
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
relationshipType), getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)); );
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace))); criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace)));
} }
return list(context, criteriaQuery, true, Relationship.class, limit, offset); return list(context, criteriaQuery, true, Relationship.class, limit, offset);
} }
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
final String relationshipIdAlias = "relationshipId";
final String itemUuidAlias = "itemUuid";
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
ArrayList<Predicate> predicates = new ArrayList<>();
// all relationships should have the specified relationship type
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType)
);
if (isLeft) {
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), latestItem)
);
// the left item has to have "latest status" => accept BOTH and LEFT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
// return the UUIDs of the right item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.rightItem).get(Item_.id).alias(itemUuidAlias)
);
} else {
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), latestItem)
);
// the right item has to have "latest status" => accept BOTH and RIGHT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
// return the UUIDs of the left item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.leftItem).get(Item_.id).alias(itemUuidAlias)
);
}
// all predicates are chained with the AND operator
criteriaQuery.where(predicates.toArray(new Predicate[]{}));
// deduplicate result
criteriaQuery.distinct(true);
// execute query
Query query = this.getHibernateSession(context).createQuery(criteriaQuery);
query.setHint("org.hibernate.cacheable", true);
List<?> resultList = query.getResultList();
// convert types
return resultList.stream()
.map(Tuple.class::cast)
.map(t -> new ItemUuidAndRelationshipId(
(UUID) t.get(itemUuidAlias),
(Integer) t.get(relationshipIdAlias)
))
.collect(Collectors.toList());
}
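// Hedged usage sketch (hypothetical helper, not part of this changeset): the returned pairs identify the
// items on the other side of each relationship for which the given item has "latest status", e.g. to decide
// which indexed documents should reference it.
private List<UUID> otherSideUuids(Context context, Item latestItem, RelationshipType type, boolean isLeft)
throws SQLException {
return findByLatestItemAndRelationshipType(context, latestItem, type, isLeft)
.stream()
.map(ItemUuidAndRelationshipId::getItemUuid)
.collect(Collectors.toList());
}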
@Override @Override
public List<Relationship> findByTypeName(Context context, String typeName) public List<Relationship> findByTypeName(Context context, String typeName)
throws SQLException { throws SQLException {
@@ -228,24 +373,26 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
}
@Override
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
if (isLeft) {
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
} else {
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
}
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao.pojo;
import java.util.UUID;
import org.dspace.content.Relationship;
import org.dspace.content.dao.RelationshipDAO;
import org.springframework.lang.NonNull;
/**
* Used by {@link RelationshipDAO#findByLatestItemAndRelationshipType} to avoid creating {@link Relationship}s.
*/
public class ItemUuidAndRelationshipId {
private final UUID itemUuid;
private final int relationshipId;
public ItemUuidAndRelationshipId(@NonNull UUID itemUuid, @NonNull int relationshipId) {
this.itemUuid = itemUuid;
this.relationshipId = relationshipId;
}
public UUID getItemUuid() {
return this.itemUuid;
}
public int getRelationshipId() {
return this.relationshipId;
}
}

View File

@@ -21,6 +21,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
@@ -111,8 +112,22 @@ public interface ItemService
* @return an iterator over the items in the archive.
* @throws SQLException if database error
*/
@Deprecated
public Iterator<Item> findAllUnfiltered(Context context) throws SQLException;
/**
* Find all items that are:
* - NOT in the workspace
* - NOT in the workflow
* - NOT a template item for e.g. a collection
*
* This implies that the result also contains older versions of items and withdrawn items.
* @param context the DSpace context.
* @return iterator over all regular items.
* @throws SQLException if database error.
*/
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/**
* Find all the items in the archive by a given submitter. The order is
* indeterminate. Only items with the "in archive" flag set are included.
@@ -813,12 +828,20 @@ public interface ItemService
*/
public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier,
String lang, boolean enableVirtualMetadata);
/**
* Retrieve the label of the entity type of the given item.
* @param item the item.
* @return the label of the entity type, taken from the item metadata, or null if not found.
*/
public String getEntityTypeLabel(Item item);
/**
* Retrieve the entity type of the given item.
* @param context the DSpace context.
* @param item the item.
* @return the entity type of the given item, or null if not found.
*/
public EntityType getEntityType(Context context, Item item) throws SQLException;
}

View File

@@ -14,7 +14,9 @@ import java.util.UUID;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context;
import org.dspace.service.DSpaceCRUDService;
@@ -49,6 +51,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted)
throws SQLException;
/**
* Retrieves the list of Relationships currently in the system for which the given Item is either
* a leftItem or a rightItem object
* @param context The relevant DSpace context
* @param item The Item that has to be the left or right item for the relationship to be
* included in the list
* @param limit paging limit
* @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of relationships for which each relationship adheres to the above
* listed constraint
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/**
* Retrieves the full list of relationships currently in the system
* @param context The relevant DSpace context
@@ -79,30 +100,54 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException;
/**
* Move the given relationship to a new leftPlace and/or rightPlace.
*
* This will
* 1. verify whether the move is authorized
* 2. move the relationship to the specified left/right place
* 3. update the left/right place of other relationships and/or metadata in order to resolve the move without
* leaving any gaps
*
* At least one of the new places should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftPlace The value to set the leftPlace of this Relationship to
* @param newRightPlace The value to set the rightPlace of this Relationship to
* @return The moved relationship with updated place variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/
Relationship move(Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace)
throws SQLException, AuthorizeException;
/**
* Move the given relationship to a new leftItem and/or rightItem.
*
* This will
* 1. move the relationship to the last place in its current left or right Item. This ensures that we don't leave
* any gaps when moving the relationship to a new Item.
* If only one of the relationship's Items is changed, the order of relationships and metadata in the other
* will not be affected
* 2. insert the relationship into the new Item(s)
*
* At least one of the new Items should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftItem The value to set the leftItem of this Relationship to
* @param newRightItem The value to set the rightItem of this Relationship to
* @return The moved relationship with updated left/right Items variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/
Relationship move(Context context, Relationship relationship, Item newLeftItem, Item newRightItem)
throws SQLException, AuthorizeException;
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -117,6 +162,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -131,6 +177,24 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -145,17 +209,51 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
throws SQLException;
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param isLeft Is the item left or right
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
* of the other item. In other words, given a relationship with the given item, the given item should have
* "latest status" in order for the other item uuid to be returned.
*
* This method differs from the "excludeNonLatest" property in other methods,
* because in this method the current item should have "latest status" to return the other item,
* whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
*
* This method is used to index items in solr; when searching for related items of one of the returned uuids,
* the given item should appear as a search result.
*
* NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
* the items on both sides, which is unnecessary.
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
* @param context the DSpace context.
* @param latestItem the target item; only relationships where this item has "latest status" should be considered.
* @param relationshipType the relationship type for which relationships should be selected.
* @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
* This is redundant in most cases, but necessary because relationship types may have
* the same entity type on both sides.
* @return a list containing pairs of relationship ids and item uuids.
* @throws SQLException if something goes wrong.
*/
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
/**
* This method will update the given item's metadata order.
@@ -174,6 +272,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @return The list of Relationship objects for which the given RelationshipType object is equal
@@ -185,6 +284,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param limit paging limit
@@ -206,18 +306,40 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
* @param rightPlace The rightPlace integer for the relationship
* @param leftwardValue The leftwardValue string for the relationship
* @param rightwardValue The rightwardValue string for the relationship
* @param latestVersionStatus The latestVersionStatus value for the relationship
* @return The created Relationship object with the given properties
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
) throws AuthorizeException, SQLException;
/**
* This method is used to construct a Relationship object with all its variables,
* except the latest version status
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
* @param relationshipType The RelationshipType object for the relationship
* @param leftPlace The leftPlace integer for the relationship
* @param rightPlace The rightPlace integer for the relationship
* @param leftwardValue The leftwardValue string for the relationship
* @param rightwardValue The rightwardValue string for the relationship
* @return The created Relationship object with the given properties
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue
) throws AuthorizeException, SQLException;
/**
* This method is used to construct a Relationship object with all its variables,
* except the leftward label, rightward label and latest version status
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
@@ -267,7 +389,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* Count total number of relationships (rows in relationship table) by a relationship type
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @return total count
@@ -287,10 +409,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
*/
int countByItem(Context context, Item item) throws SQLException;
/**
* This method returns a count of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The number of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/
int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
/**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not
@@ -300,6 +437,22 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft)
throws SQLException;
/**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return total count with the given parameters
* @throws SQLException if database error
*/
int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
/**
* Count total number of relationships (rows in relationship table)
* by a relationship leftward or rightward typeName

View File

@@ -155,12 +155,11 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of distinct results as depicted by the CriteriaQuery and parameters
* @throws SQLException
*/
public List<T> list(
Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset
) throws SQLException {
criteriaQuery.distinct(true);
return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
}
/**
@@ -183,12 +182,12 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of results determined by the CriteriaQuery and parameters
* @throws SQLException
*/
public List<T> list(
Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset,
boolean distinct
) throws SQLException {
criteriaQuery.distinct(distinct);
return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
}
/**

View File

@@ -259,12 +259,19 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
super.handler.logError("EPerson not found: " + currentUserUuid); super.handler.logError("EPerson not found: " + currentUserUuid);
throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
} }
assignSpecialGroupsInContext();
this.context.setCurrentUser(eperson); this.context.setCurrentUser(eperson);
} catch (SQLException e) { } catch (SQLException e) {
handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
} }
} }
protected void assignSpecialGroupsInContext() throws SQLException {
for (UUID uuid : handler.getSpecialGroups()) {
context.setSpecialGroup(uuid);
}
}
/**
* Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options.

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.discovery;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
@@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer {
// collect Items, Collections, Communities that need indexing
private Set<IndexableObject> objectsToUpdate = new HashSet<>();
// collect freshly created Items that need indexing (requires pre-db status)
private Set<IndexableObject> createdItemsToUpdate = new HashSet<>();
// unique search IDs to delete
private Set<String> uniqueIdsToDelete = new HashSet<>();
@@ -65,6 +68,7 @@ public class IndexEventConsumer implements Consumer {
if (objectsToUpdate == null) {
objectsToUpdate = new HashSet<>();
uniqueIdsToDelete = new HashSet<>();
createdItemsToUpdate = new HashSet<>();
}
int st = event.getSubjectType();
@@ -143,6 +147,7 @@ public class IndexEventConsumer implements Consumer {
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
uniqueIdsToDelete.add(detail);
}
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
}
break;
@@ -162,7 +167,7 @@ public class IndexEventConsumer implements Consumer {
// also update the object in order to index mapped/unmapped Items
if (subject != null &&
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
}
}
break;
@@ -209,23 +214,11 @@ public class IndexEventConsumer implements Consumer {
}
// update the changed Items not deleted because they were on create list
for (IndexableObject iu : objectsToUpdate) {
indexObject(ctx, iu, false);
}
// update the created Items with a pre-db status
for (IndexableObject iu : createdItemsToUpdate) {
indexObject(ctx, iu, true);
}
} finally {
if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) {
@@ -235,6 +228,27 @@ public class IndexEventConsumer implements Consumer {
// "free" the resources // "free" the resources
objectsToUpdate.clear(); objectsToUpdate.clear();
uniqueIdsToDelete.clear(); uniqueIdsToDelete.clear();
createdItemsToUpdate.clear();
}
}
}
private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject()));
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false, preDb);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
}
}
}

View File

@@ -9,7 +9,9 @@ package org.dspace.discovery;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Map;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context;
/**
@@ -30,6 +32,17 @@ public interface IndexingService {
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit) throws SQLException, SearchServiceException;
/**
* Index a given DSO
* @param context The DSpace Context
* @param dso The DSpace Object to index
* @param force Force update even if not stale
* @param commit Commit the changes
* @param preDb Add a "preDB" status to the index (only applicable to Items)
*/
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException;
void unIndexContent(Context context, IndexableObject dso)
throws SQLException, IOException;
@@ -62,4 +75,15 @@ public interface IndexingService {
void optimize() throws SearchServiceException;
void buildSpellCheck() throws SearchServiceException, IOException;
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
* @param uniqueIndexId The unique index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/
*/
void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String,Object> fieldModifier)
throws SolrServerException, IOException;
}
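A hedged usage sketch for the new atomicUpdate method (caller side, not part of this changeset); it assumes the conventional "Item-<uuid>" unique index id, the "latestVersion" field introduced elsewhere in this changeset, and the standard Solr "set" atomic-update modifier; indexingService and item are hypothetical caller variables:
Map<String, Object> fieldModifier = new HashMap<>();
fieldModifier.put("set", false); // Solr atomic-update modifier: overwrite the stored value of the field
indexingService.atomicUpdate(context, "Item-" + item.getID(), "latestVersion", fieldModifier);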

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import org.dspace.content.Item;
@@ -38,6 +39,7 @@ public interface SearchService {
DiscoverResult search(Context context, DiscoverQuery query)
throws SearchServiceException;
/**
* Convenient method to call @see #search(Context, DSpaceObject,
* DiscoverQuery, boolean) with includeWithdrawn=false
@@ -52,6 +54,19 @@ public interface SearchService {
DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
/**
* Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the
* results
*
* @param context DSpace context object
* @param dso a DSpace object to use as a scope of the search
* @param query the discovery query object
* @return an iterator iterating over all results from the search
* @throws SearchServiceException if search error
*/
Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset,
int max, String... filterquery);

View File

@@ -20,6 +20,7 @@ import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowItem;
@@ -170,4 +171,10 @@ public class SearchUtils {
DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName);
result.add(configurationExtra);
}
public static DiscoverQueryBuilder getQueryBuilder() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager
.getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class);
}
}
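Since DiscoverQueryBuilder now lives in org.dspace.discovery.utils (see the package change further down in this changeset), non-REST code can obtain it through this helper; a trivial sketch:
// obtain the Spring-managed builder outside the REST layer, e.g. from a script or service
DiscoverQueryBuilder queryBuilder = SearchUtils.getQueryBuilder();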

View File

@@ -8,6 +8,8 @@
package org.dspace.discovery;
import static java.util.stream.Collectors.joining;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.io.PrintWriter;
@@ -118,8 +120,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
/**
* If the handle for the "dso" already exists in the index, and the "dso"
* has a lastModified timestamp that is newer than the document in the index
@@ -166,6 +166,24 @@ public class SolrServiceImpl implements SearchService, IndexingService {
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
}
/**
* Update the given indexable object using a given service
* @param context The DSpace Context
* @param indexableObjectService The service to index the object with
* @param indexableObject The object to index
* @param preDB Add a "preDB" status to the document
*/
protected void update(Context context, IndexFactory indexableObjectService, IndexableObject indexableObject,
boolean preDB) throws IOException, SQLException, SolrServerException {
if (preDB) {
final SolrInputDocument solrInputDocument =
indexableObjectService.buildNewDocument(context, indexableObject);
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
} else {
update(context, indexableObjectService, indexableObject);
}
}
/**
* unIndex removes an Item, Collection, or Community
*
@@ -454,6 +472,16 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
@Override
public void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String, Object> fieldModifier)
throws SolrServerException, IOException {
SolrInputDocument solrInputDocument = new SolrInputDocument();
solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId);
solrInputDocument.addField(field, fieldModifier);
solrSearchCore.getSolr().add(solrInputDocument);
}
// //////////////////////////////////
// Private
// //////////////////////////////////
@@ -717,6 +745,11 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
@Override
public Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException {
return new SearchIterator(context, dso, query);
}
@Override
public DiscoverResult search(Context context, DiscoverQuery discoveryQuery)
@@ -733,6 +766,72 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
/**
* This class implements an iterator over items that is specifically used to iterate over search results
*/
private class SearchIterator implements Iterator<Item> {
private Context context;
private DiscoverQuery discoverQuery;
private DiscoverResult discoverResult;
private IndexableObject dso;
private int absoluteCursor;
private int relativeCursor;
private int pagesize;
SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException {
this.context = context;
this.discoverQuery = discoverQuery;
this.absoluteCursor = discoverQuery.getStart();
initialise();
}
SearchIterator(Context context, IndexableObject dso, DiscoverQuery discoverQuery)
throws SearchServiceException {
this.context = context;
this.dso = dso;
this.discoverQuery = discoverQuery;
initialise();
}
private void initialise() throws SearchServiceException {
this.relativeCursor = 0;
if (discoverQuery.getMaxResults() != -1) {
pagesize = discoverQuery.getMaxResults();
} else {
pagesize = 10;
}
discoverQuery.setMaxResults(pagesize);
this.discoverResult = search(context, dso, discoverQuery);
}
@Override
public boolean hasNext() {
return absoluteCursor < discoverResult.getTotalSearchResults();
}
@Override
public Item next() {
//paginate getting results from the discoverquery.
if (relativeCursor == pagesize) {
// get a new page of results when the last element of the previous page has been read
int offset = absoluteCursor;
// reset the position counter for getting element relativecursor on a page
relativeCursor = 0;
discoverQuery.setStart(offset);
try {
discoverResult = search(context, dso, discoverQuery);
} catch (SearchServiceException e) {
log.error("error while getting search results", e);
}
}
// get the element at position relativecursor on a page
IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor);
relativeCursor++;
absoluteCursor++;
return (Item) res.getIndexedObject();
}
}
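A usage sketch for iteratorSearch (hypothetical caller, not part of this changeset); it assumes a null scope is acceptable, as with the other search variants, and that searchService is an injected SearchService:
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setQuery("*:*");
discoverQuery.setMaxResults(100); // page size the iterator reuses when fetching further pages
Iterator<Item> itemIterator = searchService.iteratorSearch(context, null, discoverQuery);
while (itemIterator.hasNext()) {
Item item = itemIterator.next();
// process one result at a time without materialising the full result list
}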
protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException {
SolrQuery solrQuery = new SolrQuery();
@@ -753,6 +852,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
solrQuery.addField(STATUS_FIELD);
if (discoveryQuery.isSpellCheck()) {
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -903,12 +1003,15 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Enables solr to remove documents related to items not on database anymore (Stale)
// if maxAttempts is greater than 0 cleanup the index on each step
if (maxAttempts >= 0) {
Object statusObj = doc.getFirstValue(STATUS_FIELD);
if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// avoid processing the response except if we are in the last allowed execution.
// When maxAttempts is 0 this will be just the first and last run, as the
// executionCount is increased at the start of the loop and will be equal to 1
skipLoadingResponse = maxAttempts + 1 != executionCount;
}
}
continue;
}
if (!skipLoadingResponse) {
@@ -1389,6 +1492,28 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
@Override
public void indexContent(Context context, IndexableObject indexableObject, boolean force,
boolean commit, boolean preDb) throws SearchServiceException, SQLException {
if (preDb) {
try {
final IndexFactory indexableObjectFactory = indexObjectServiceFactory.
getIndexableObjectFactory(indexableObject);
if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) {
update(context, indexableObjectFactory, indexableObject, true);
log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID()));
}
} catch (IOException | SQLException | SolrServerException | SearchServiceException e) {
log.error(e.getMessage(), e);
}
} else {
indexContent(context, indexableObject, force);
}
if (commit) {
commit();
}
}
@Override
public void commit() throws SearchServiceException {
try {

View File

@@ -70,10 +70,20 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
return doc;
}
@Override
public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException {
return buildDocument(context, indexableObject);
}
@Override
public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument)
throws SQLException, IOException, SolrServerException {
try {
writeDocument(solrInputDocument, null);
} catch (Exception e) {
log.error("Error occurred while writing SOLR document for {} object {}",
indexableObject.getType(), indexableObject.getID(), e);
}
}
/**

View File

@@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -43,7 +42,6 @@ import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.discovery.FullTextContentStreams;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
@@ -64,6 +62,9 @@ import org.dspace.handle.service.HandleService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.MultiFormatDateParser;
import org.dspace.util.SolrUtils;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -78,6 +79,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class);
public static final String VARIANTS_STORE_SEPARATOR = "###";
public static final String STORE_SEPARATOR = "\n|||\n";
public static final String STATUS_FIELD = "database_status";
public static final String STATUS_FIELD_PREDB = "predb";
@Autowired
@@ -96,11 +99,13 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
protected WorkflowItemIndexFactory workflowItemIndexFactory;
@Autowired
protected WorkspaceItemIndexFactory workspaceItemIndexFactory;
@Autowired
protected VersionHistoryService versionHistoryService;
@Override
public Iterator<IndexableItem> findAll(Context context) throws SQLException {
Iterator<Item> items = itemService.findAllRegularItems(context);
return new Iterator<IndexableItem>() {
@Override
public boolean hasNext() {
@@ -139,6 +144,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
doc.addField("withdrawn", item.isWithdrawn()); doc.addField("withdrawn", item.isWithdrawn());
doc.addField("discoverable", item.isDiscoverable()); doc.addField("discoverable", item.isDiscoverable());
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified())); doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
doc.addField("latestVersion", isLatestVersion(context, item));
EPerson submitter = item.getSubmitter();
if (submitter != null) {
@@ -169,6 +175,51 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return doc;
}
/**
* Check whether the given item is the latest version.
* If the latest item cannot be determined, because either the version history or the latest version is not present,
* assume the item is latest.
* @param context the DSpace context.
* @param item the item that should be checked.
* @return true if the item is the latest version, false otherwise.
*/
protected boolean isLatestVersion(Context context, Item item) throws SQLException {
VersionHistory history = versionHistoryService.findByItem(context, item);
if (history == null) {
// not all items have a version history
// if an item does not have a version history, it is by definition the latest version
return true;
}
// start with the very latest version of the given item (may still be in workspace)
Version latestVersion = versionHistoryService.getLatestVersion(context, history);
// find the latest version of the given item that is archived
while (latestVersion != null && !latestVersion.getItem().isArchived()) {
latestVersion = versionHistoryService.getPrevious(context, history, latestVersion);
}
// could not find an archived version of the given item
if (latestVersion == null) {
// this scenario should never happen, but let's err on the side of showing too many items vs. too few
// (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version)
return true;
}
// sanity check
assert latestVersion.getItem().isArchived();
return item.equals(latestVersion.getItem());
}
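The "latestVersion" flag written into each document above can then be used to hide older versions at query time; a hypothetical caller-side filter (not part of this changeset) would look like:
// only return the most recent archived version of each item
discoverQuery.addFilterQueries("latestVersion:true");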
@Override
public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem)
throws SQLException, IOException {
SolrInputDocument doc = buildDocument(context, indexableItem);
doc.addField(STATUS_FIELD, STATUS_FIELD_PREDB);
return doc;
}
@Override
public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item,
List<DiscoveryConfiguration> discoveryConfigurations)
@@ -713,26 +764,31 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
}
@Override
public List getIndexableObjects(Context context, Item item) throws SQLException {
if (item.isArchived() || item.isWithdrawn()) {
// we only want to index an item as an item if it is not in workflow
return List.of(new IndexableItem(item));
}
final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, item);
if (workspaceItem != null) {
// a workspace item is linked to the given item
return List.copyOf(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem));
}
final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, item);
if (xmlWorkflowItem != null) {
// a workflow item is linked to the given item
return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
if (!isLatestVersion(context, item)) {
// the given item is an older version of another item
return List.of(new IndexableItem(item));
}
// nothing to index
return List.of();
}
@Override @Override


@@ -46,6 +46,14 @@ public interface IndexFactory<T extends IndexableObject, S> {
*/ */
SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException; SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException;
/**
* Create a Solr document with all of the shared fields initialized.
* Unlike the regular buildDocument, it may also contain special fields that are only required for "new" documents.
* @param context DSpace context object
* @param indexableObject the indexableObject that we want to index
* @return initialized Solr document
*/
SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException;
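/*
 * Illustrative note, not part of this interface: a typical implementation can satisfy this
 * contract by reusing buildDocument for the shared fields and then adding whatever marker a
 * brand-new document needs, as ItemIndexFactoryImpl does above by setting STATUS_FIELD to
 * STATUS_FIELD_PREDB on top of its regular buildDocument output.
 */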
/** /**
* Write the provided document to the solr core * Write the provided document to the solr core
* @param context DSpace context object * @param context DSpace context object


@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.utils; package org.dspace.discovery.utils;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@@ -19,10 +19,6 @@ import java.util.Objects;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.query.SearchQueryConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.InvalidSearchRequestException;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFacetField;
@@ -32,6 +28,7 @@ import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration; import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration;
@@ -40,17 +37,11 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.discovery.configuration.DiscoverySortConfiguration; import org.dspace.discovery.configuration.DiscoverySortConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;
/**
* This class builds the queries for the /search and /facet endpoints.
*/
@Component
public class DiscoverQueryBuilder implements InitializingBean { public class DiscoverQueryBuilder implements InitializingBean {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class);
@@ -80,19 +71,24 @@ public class DiscoverQueryBuilder implements InitializingBean {
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page) String dsoType, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection) throws SearchServiceException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page); return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset,
sortProperty, sortDirection);
} }
/** /**
* Build a discovery query * Build a discovery query
* *
@@ -102,13 +98,17 @@ public class DiscoverQueryBuilder implements InitializingBean {
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page) List<String> dsoTypes, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection)
throws IllegalArgumentException, SearchServiceException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
@@ -117,8 +117,8 @@ public class DiscoverQueryBuilder implements InitializingBean {
addFaceting(context, scope, queryArgs, discoveryConfiguration); addFaceting(context, scope, queryArgs, discoveryConfiguration);
//Configure pagination and sorting //Configure pagination and sorting
configurePagination(page, queryArgs); configurePagination(pageSize, offset, queryArgs);
configureSorting(page, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration());
addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs); addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs);
return queryArgs; return queryArgs;
@@ -147,19 +147,21 @@ public class DiscoverQueryBuilder implements InitializingBean {
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param facetName the facet field * @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page, String facetName) String dsoType, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery( return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset,
facetName);
} }
/** /**
@@ -172,49 +174,52 @@ public class DiscoverQueryBuilder implements InitializingBean {
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param facetName the facet field * @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName) List<String> dsoTypes, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page); addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize);
//We don't want any search results, we only want facet values //We don't want any search results, we only want facet values
queryArgs.setMaxResults(0); queryArgs.setMaxResults(0);
//Configure pagination //Configure pagination
configurePaginationForFacets(page, queryArgs); configurePaginationForFacets(offset, queryArgs);
return queryArgs; return queryArgs;
} }
private void configurePaginationForFacets(Pageable page, DiscoverQuery queryArgs) { private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) {
if (page != null) { if (offset != null) {
queryArgs.setFacetOffset(Math.toIntExact(page.getOffset())); queryArgs.setFacetOffset(Math.toIntExact(offset));
} }
} }
private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix, private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, String facetName, Pageable page) DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration,
throws DSpaceBadRequestException { String facetName, Integer pageSize)
throws IllegalArgumentException {
DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName); DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName);
if (facet != null) { if (facet != null) {
queryArgs.setFacetMinCount(1); queryArgs.setFacetMinCount(1);
int pageSize = Math.min(pageSizeLimit, page.getPageSize());
pageSize = pageSize != null ? Math.min(pageSizeLimit, pageSize) : pageSizeLimit;
fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize); fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize);
} else { } else {
throw new DSpaceBadRequestException(facetName + " is not a valid search facet"); throw new IllegalArgumentException(facetName + " is not a valid search facet");
} }
return queryArgs; return queryArgs;
@@ -241,18 +246,18 @@ public class DiscoverQueryBuilder implements InitializingBean {
int facetLimit = pageSize + 1; int facetLimit = pageSize + 1;
//This should take care of the sorting for us //This should take care of the sorting for us
queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit, queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit,
facet.getSortOrderSidebar(), StringUtils.trimToNull(prefix))); facet.getSortOrderSidebar(),
StringUtils.trimToNull(prefix)));
} }
} }
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query, String query,
List<SearchFilter> searchFilters, List<String> dsoTypes) List<QueryBuilderSearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
//Add search filters queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters));
queryArgs.addFilterQueries(convertFilters(context, discoveryConfiguration, searchFilters));
//Set search query //Set search query
if (StringUtils.isNotBlank(query)) { if (StringUtils.isNotBlank(query)) {
@@ -274,30 +279,17 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray( .toArray(
new String[discoveryConfiguration.getDefaultFilterQueries() new String[discoveryConfiguration
.getDefaultFilterQueries()
.size()])); .size()]));
return queryArgs; return queryArgs;
} }
private void configureSorting(Pageable page, DiscoverQuery queryArgs, private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs,
DiscoverySortConfiguration searchSortConfiguration) throws DSpaceBadRequestException { DiscoverySortConfiguration searchSortConfiguration)
String sortBy = null; throws IllegalArgumentException, SearchServiceException {
String sortOrder = null; String sortBy = sortProperty;
String sortOrder = sortDirection;
//Read the Pageable object if there is one
if (page != null) {
Sort sort = page.getSort();
if (sort != null && sort.iterator().hasNext()) {
Sort.Order order = sort.iterator().next();
sortBy = order.getProperty();
sortOrder = order.getDirection().name();
}
}
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new InvalidSearchRequestException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Load defaults if we did not receive values //Load defaults if we did not receive values
if (sortBy == null) { if (sortBy == null) {
@@ -307,6 +299,12 @@ public class DiscoverQueryBuilder implements InitializingBean {
sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder); sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder);
} }
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new SearchServiceException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Update Discovery query //Update Discovery query
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
.getSortFieldConfiguration(sortBy); .getSortFieldConfiguration(sortBy);
@@ -320,11 +318,11 @@ public class DiscoverQueryBuilder implements InitializingBean {
} else if ("desc".equalsIgnoreCase(sortOrder)) { } else if ("desc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc); queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc);
} else { } else {
throw new DSpaceBadRequestException(sortOrder + " is not a valid sort order"); throw new IllegalArgumentException(sortOrder + " is not a valid sort order");
} }
} else { } else {
throw new DSpaceBadRequestException(sortBy + " is not a valid sort field"); throw new IllegalArgumentException(sortBy + " is not a valid sort field");
} }
} }
@@ -354,59 +352,24 @@ public class DiscoverQueryBuilder implements InitializingBean {
return sortBy; return sortBy;
} }
private void configurePagination(Pageable page, DiscoverQuery queryArgs) { private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) {
if (page != null) { queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit);
queryArgs.setMaxResults(Math.min(pageSizeLimit, page.getPageSize())); queryArgs.setStart(offset != null ? Math.toIntExact(offset) : 0);
queryArgs.setStart(Math.toIntExact(page.getOffset()));
} else {
queryArgs.setMaxResults(pageSizeLimit);
queryArgs.setStart(0);
}
} }
private String getDsoType(String dsoType) throws DSpaceBadRequestException { private String getDsoType(String dsoType) throws IllegalArgumentException {
for (IndexFactory indexFactory : indexableFactories) { for (IndexFactory indexFactory : indexableFactories) {
if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) { if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) {
return indexFactory.getType(); return indexFactory.getType();
} }
} }
throw new DSpaceBadRequestException(dsoType + " is not a valid DSpace Object type"); throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type");
} }
public void setIndexableFactories(List<IndexFactory> indexableFactories) { public void setIndexableFactories(List<IndexFactory> indexableFactories) {
this.indexableFactories = indexableFactories; this.indexableFactories = indexableFactories;
} }
private String[] convertFilters(Context context, DiscoveryConfiguration discoveryConfiguration,
List<SearchFilter> searchFilters) throws DSpaceBadRequestException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
SearchQueryConverter searchQueryConverter = new SearchQueryConverter();
List<SearchFilter> transformedFilters = searchQueryConverter.convert(searchFilters);
try {
for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(transformedFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new DSpaceBadRequestException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new DSpaceBadRequestException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs, private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs,
DiscoveryConfiguration discoveryConfiguration) { DiscoveryConfiguration discoveryConfiguration) {
@@ -427,4 +390,34 @@ public class DiscoverQueryBuilder implements InitializingBean {
return queryArgs; return queryArgs;
} }
private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration,
List<QueryBuilderSearchFilter> searchFilters)
throws IllegalArgumentException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
try {
for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new IllegalArgumentException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
} }
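A minimal usage sketch of the relocated builder, now that it takes plain paging and sorting values instead of Spring's Pageable and the REST-layer SearchFilter. The variables queryBuilder, context, scope and discoveryConfiguration are assumed to come from the caller's wiring, and the filter values, the "Item" type and the "subject" facet name are illustrative assumptions rather than values taken from this change.

    // Filter on an assumed "title" search filter configured in the discovery configuration.
    QueryBuilderSearchFilter titleFilter = new QueryBuilderSearchFilter("title", "contains", "dspace");

    // Search query: page size 10, offset 0, default sort (null sort property and direction).
    DiscoverQuery searchQuery = queryBuilder.buildQuery(context, scope, discoveryConfiguration,
            "open access", List.of(titleFilter), "Item", 10, 0L, null, null);

    // Facet-only query for an assumed "subject" sidebar facet (no prefix, first page of values).
    DiscoverQuery facetQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration,
            null, "open access", List.of(titleFilter), "Item", 10, 0L, "subject");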


@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.utils.parameter;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
* Representation for a Discovery search filter
*/
public class QueryBuilderSearchFilter {
private String name;
private String operator;
private String value;
public QueryBuilderSearchFilter(final String name, final String operator, final String value) {
this.name = name;
this.operator = operator;
this.value = value;
}
public String getName() {
return name;
}
public String getOperator() {
return operator;
}
public String getValue() {
return value;
}
public String toString() {
return "QueryBuilderSearchFilter{" +
"name='" + name + '\'' +
", operator='" + operator + '\'' +
", value='" + value + '\'' +
'}';
}
public boolean equals(Object object) {
if (object instanceof QueryBuilderSearchFilter) {
QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object;
if (!StringUtils.equals(obj.getName(), getName())) {
return false;
}
if (!StringUtils.equals(obj.getOperator(), getOperator())) {
return false;
}
if (!StringUtils.equals(obj.getValue(), getValue())) {
return false;
}
return true;
}
return false;
}
public int hashCode() {
return Objects.hash(name, operator, value);
}
}
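A small illustration of the value-object semantics above: equality and hashCode are per-field and null-safe (via StringUtils.equals and Objects.hash), so equal filters can be deduplicated or compared directly in tests. The field values are arbitrary examples.

    QueryBuilderSearchFilter a = new QueryBuilderSearchFilter("title", "contains", "dspace");
    QueryBuilderSearchFilter b = new QueryBuilderSearchFilter("title", "contains", "dspace");
    // a.equals(b) is true and a.hashCode() == b.hashCode()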


@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class CiniiFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "ciniiMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}


@@ -0,0 +1,447 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.dspace.services.ConfigurationService;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Cinii
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String urlSearch;
@Autowired
private LiveImportClient liveImportClient;
@Autowired
private ConfigurationService configurationService;
@Override
public String getImportSource() {
return "cinii";
}
@Override
public void init() throws Exception {}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Cinii");
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
/**
* This class is a Callable implementation to get CiNii entries based on
* a query object.
*
* This Callable uses as query value the string queryString passed to the constructor.
* If the object is constructed from a Query instance, the Query's map entry with key "query" is used instead.
* Pagination is supported too, using the values of the Query's map entries with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> records = new LinkedList<ImportRecord>();
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String queryString = query.getParameterAsClass("query", String.class);
String appId = configurationService.getProperty("cinii.appid");
List<String> ids = getCiniiIds(appId, count, null, null, null, start, queryString);
if (CollectionUtils.isNotEmpty(ids)) {
for (String id : ids) {
List<ImportRecord> tmp = search(id, appId);
if (CollectionUtils.isNotEmpty(tmp)) {
tmp.forEach(x -> x.addValue(createIdentifier(id)));
}
records.addAll(tmp);
}
}
return records;
}
}
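/*
 * Illustration only, for the class above: when constructed from a Query object, the callable
 * reads the same keys that the string-based constructor sets, e.g.
 *
 *     Query query = new Query();
 *     query.addParameter("query", "open access");  // search keyword (example value)
 *     query.addParameter("start", 0);              // index of the first result
 *     query.addParameter("count", 20);             // page size
 */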
/**
* This class is a Callable implementation to get a CiNii entry using a CiNii ID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
String appId = configurationService.getProperty("cinii.appid");
String id = query.getParameterAsClass("id", String.class);
List<ImportRecord> importRecord = search(id, appId);
if (CollectionUtils.isNotEmpty(importRecord)) {
importRecord.forEach(x -> x.addValue(createIdentifier(id)));
}
return importRecord;
}
}
/**
* This class is a Callable implementation to search CiNii entries
* by author, title and year.
* Pagination is supported too, using the values of the Query's map entries with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> records = new LinkedList<ImportRecord>();
String title = query.getParameterAsClass("title", String.class);
String author = query.getParameterAsClass("author", String.class);
Integer year = query.getParameterAsClass("year", Integer.class);
Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String appId = configurationService.getProperty("cinii.appid");
List<String> ids = getCiniiIds(appId, maxResult, author, title, year, start, null);
if (CollectionUtils.isNotEmpty(ids)) {
for (String id : ids) {
List<ImportRecord> importRecords = search(id, appId);
if (CollectionUtils.isNotEmpty(importRecords)) {
importRecords.forEach(x -> x.addValue(createIdentifier(id)));
}
records.addAll(importRecords);
}
}
return records;
}
}
/**
* This class is a Callable implementation to count the number
* of entries for a CiNii query.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
String appId = configurationService.getProperty("cinii.appid");
String queryString = query.getParameterAsClass("query", String.class);
return countCiniiElement(appId, null, null, null, null, null, queryString);
}
}
/**
* Get metadata by searching CiNii RDF API with CiNii NAID
*
* @param id CiNii NAID to search by
* @param appId registered application identifier for the API
* @return record metadata
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws HttpException Represents an XML/HTTP fault and provides access to the HTTP status code.
*/
protected List<ImportRecord> search(String id, String appId)
throws IOException, HttpException {
try {
List<ImportRecord> records = new LinkedList<ImportRecord>();
URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
records.add(transformSourceRecords(record));
}
return records;
} catch (URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
return root.getChildren();
} catch (JDOMException | IOException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Returns the identifiers of the matching CiNii articles, extracted from the URI links
* (for example: https://cir.nii.ac.jp/crid/123456789) returned by the search
*
* @param appId Application ID
* @param maxResult The number of search results per page
* @param author Author name
* @param title Article name
* @param year Year of publication
* @param start Start number for the acquired search result list
* @param query Keyword to be searched
*/
private List<String> getCiniiIds(String appId, Integer maxResult, String author, String title,
Integer year, Integer start, String query) {
try {
List<String> ids = new ArrayList<>();
URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
uriBuilder.addParameter("format", "rss");
if (StringUtils.isNotBlank(appId)) {
uriBuilder.addParameter("appid", appId);
}
if (Objects.nonNull(maxResult) && maxResult != 0) {
uriBuilder.addParameter("count", maxResult.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("start", start.toString());
}
if (StringUtils.isNotBlank(title)) {
uriBuilder.addParameter("title", title);
}
if (StringUtils.isNotBlank(author)) {
uriBuilder.addParameter("author", author);
}
if (StringUtils.isNotBlank(query)) {
uriBuilder.addParameter("q", query);
}
if (Objects.nonNull(year) && year != -1 && year != 0) {
uriBuilder.addParameter("year_from", String.valueOf(year));
uriBuilder.addParameter("year_to", String.valueOf(year));
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
int url_len = this.url.length() - 1;
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"),
Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"));
XPathExpression<Attribute> xpath = XPathFactory.instance().compile("//ns:item/@rdf:about",
Filters.attribute(), null, namespaces);
List<Attribute> recordsList = xpath.evaluate(root);
for (Attribute item : recordsList) {
String value = item.getValue();
if (value.length() > url_len) {
ids.add(value.substring(url_len + 1));
}
}
return ids;
} catch (JDOMException | IOException | URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Returns the total number of CiNii articles returned by a specific query
*
* @param appId Application ID
* @param maxResult The number of search results per page
* @param author Author name
* @param title Article name
* @param year Year of publication
* @param start Start number for the acquired search result list
* @param query Keyword to be searched
*/
private Integer countCiniiElement(String appId, Integer maxResult, String author, String title,
Integer year, Integer start, String query) {
try {
URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
uriBuilder.addParameter("format", "rss");
uriBuilder.addParameter("appid", appId);
if (Objects.nonNull(maxResult) && maxResult != 0) {
uriBuilder.addParameter("count", maxResult.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("start", start.toString());
}
if (StringUtils.isNotBlank(title)) {
uriBuilder.addParameter("title", title);
}
if (StringUtils.isNotBlank(author)) {
uriBuilder.addParameter("author", author);
}
if (StringUtils.isNotBlank(query)) {
uriBuilder.addParameter("q", query);
}
if (Objects.nonNull(year) && year != -1 && year != 0) {
uriBuilder.addParameter("year_from", String.valueOf(year));
uriBuilder.addParameter("year_to", String.valueOf(year));
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays
.asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath = XPathFactory.instance().compile("//opensearch:totalResults",
Filters.element(), null, namespaces);
List<Element> nodes = xpath.evaluate(root);
if (nodes != null && !nodes.isEmpty()) {
return Integer.parseInt(((Element) nodes.get(0)).getText());
}
return 0;
} catch (JDOMException | IOException | URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private MetadatumDTO createIdentifier(String id) {
MetadatumDTO metadatumDTO = new MetadatumDTO();
metadatumDTO.setSchema("dc");
metadatumDTO.setElement("identifier");
metadatumDTO.setQualifier("other");
metadatumDTO.setValue(id);
return metadatumDTO;
}
}
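A hedged usage sketch of the service above. In DSpace the instance, its base URLs and the cinii.appid property are normally provided through Spring and the configuration service; the URLs, query text and identifier below are assumptions used only for illustration.

    // ciniiService stands for a fully wired CiniiImportMetadataSourceServiceImpl bean.
    ciniiService.setUrl("https://cir.nii.ac.jp/crid/");                // assumed base URL for single-record RDF lookups
    ciniiService.setUrlSearch("https://cir.nii.ac.jp/opensearch/all"); // assumed OpenSearch endpoint

    int total = ciniiService.getRecordsCount("open access");                            // matches for a keyword
    Collection<ImportRecord> firstPage = ciniiService.getRecords("open access", 0, 20);  // first 20 results
    ImportRecord one = ciniiService.getRecord("1234567890");                             // lookup by CiNii id (example)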


@@ -74,7 +74,8 @@ public class LiveImportClientImpl implements LiveImportClient {
HttpResponse httpResponse = httpClient.execute(method); HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessfull(httpResponse)) { if (isNotSuccessfull(httpResponse)) {
throw new RuntimeException(); throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code, reason= "
+ httpResponse.getStatusLine().getReasonPhrase());
} }
InputStream inputStream = httpResponse.getEntity().getContent(); InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset()); return IOUtils.toString(inputStream, Charset.defaultCharset());


@@ -0,0 +1,173 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the ScopusID, orcid, author name and affiliationID
* from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor {
private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom");
private MetadataFieldConfig orcid;
private MetadataFieldConfig scopusId;
private MetadataFieldConfig authname;
private MetadataFieldConfig affiliation;
private Map<String, String> affId2affName = new HashMap<String, String>();
/**
* Retrieve the metadata associated with the given object.
* Depending on the retrieved node (using the query),
* different types of values will be added to the MetadatumDTO list.
*
* @param element A class to retrieve metadata from.
* @return A collection of MetadatumDTO values. Only the ScopusID, orcid, author name and affiliation
* of the found authors are put in the collection.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<MetadatumDTO> metadatums = null;
fillAffillation(element);
try {
List<Element> nodes = element.getChildren("author", NAMESPACE);
for (Element el : nodes) {
metadatums = getMetadataOfAuthors(el);
if (Objects.nonNull(metadatums)) {
for (MetadatumDTO metadatum : metadatums) {
values.add(metadatum);
}
}
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
return values;
}
/**
* Retrieve the ScopusID, orcid, author name and affiliation
* metadata associated with the given element object.
* If the value retrieved from the element is blank,
* no metadatum is added for that field.
*
* @param element A class to retrieve metadata from
* @throws JaxenException If Xpath evaluation failed
*/
private List<MetadatumDTO> getMetadataOfAuthors(Element element) throws JaxenException {
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
Element authname = element.getChild("authname", NAMESPACE);
Element scopusId = element.getChild("authid", NAMESPACE);
Element orcid = element.getChild("orcid", NAMESPACE);
Element afid = element.getChild("afid", NAMESPACE);
addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname));
addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId));
addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid));
addMetadatum(metadatums, getMetadata(Objects.nonNull(afid) && StringUtils.isNotBlank(afid.getValue())
? this.affId2affName.get(afid.getValue()) : null, this.affiliation));
return metadatums;
}
private void addMetadatum(List<MetadatumDTO> list, MetadatumDTO metadatum) {
if (Objects.nonNull(metadatum)) {
list.add(metadatum);
}
}
private String getElementValue(Element element) {
if (Objects.nonNull(element)) {
return element.getValue();
}
return StringUtils.EMPTY;
}
private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) {
if (StringUtils.isBlank(value)) {
return null;
}
MetadatumDTO metadata = new MetadatumDTO();
metadata.setElement(metadaConfig.getElement());
metadata.setQualifier(metadaConfig.getQualifier());
metadata.setSchema(metadaConfig.getSchema());
metadata.setValue(value);
return metadata;
}
private void fillAffillation(Element element) {
try {
List<Element> nodes = element.getChildren("affiliation", NAMESPACE);
for (Element el : nodes) {
fillAffiliation2Name(el);
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
}
private void fillAffiliation2Name(Element element) throws JaxenException {
Element affilationName = element.getChild("affilname", NAMESPACE);
Element affilationId = element.getChild("afid", NAMESPACE);
if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) {
affId2affName.put(affilationId.getValue(), affilationName.getValue());
}
}
public MetadataFieldConfig getAuthname() {
return authname;
}
public void setAuthname(MetadataFieldConfig authname) {
this.authname = authname;
}
public MetadataFieldConfig getOrcid() {
return orcid;
}
public void setOrcid(MetadataFieldConfig orcid) {
this.orcid = orcid;
}
public MetadataFieldConfig getScopusId() {
return scopusId;
}
public void setScopusId(MetadataFieldConfig scopusId) {
this.scopusId = scopusId;
}
public MetadataFieldConfig getAffiliation() {
return affiliation;
}
public void setAffiliation(MetadataFieldConfig affiliation) {
this.affiliation = affiliation;
}
}
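For orientation, a sketch of how the four target fields above might be wired; in DSpace this is normally done in the Spring configuration of the Scopus import, and both the metadata field names and the (schema, element, qualifier) constructor used here are assumptions for illustration only.

    AuthorMetadataContributor authorContributor = new AuthorMetadataContributor();
    // Target metadata fields are placeholders, not taken from this change.
    authorContributor.setAuthname(new MetadataFieldConfig("dc", "contributor", "author"));
    authorContributor.setScopusId(new MetadataFieldConfig("person", "identifier", "scopus-author-id"));
    authorContributor.setOrcid(new MetadataFieldConfig("person", "identifier", "orcid"));
    authorContributor.setAffiliation(new MetadataFieldConfig("person", "affiliation", "name"));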

View File

@@ -0,0 +1,110 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the Scopus startPage and endPage from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor {
private MetadataFieldConfig startPageMetadata;
private MetadataFieldConfig endPageMetadata;
/**
* Retrieve the metadata associated with the given Element object.
* Depending on the retrieved node (using the query),
* StartPage and EndPage values will be added to the MetadatumDTO list
*
* @param el A class to retrieve metadata from.
* @return A collection of MetadatumDTO values. Only the StartPage and EndPage
* of the found records are put in the collection.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element el) {
List<MetadatumDTO> values = new LinkedList<>();
List<MetadatumDTO> metadatums = null;
for (String ns : prefixToNamespaceMapping.keySet()) {
List<Element> nodes = el.getChildren(query, Namespace.getNamespace(ns));
for (Element element : nodes) {
metadatums = getMetadatum(element.getValue());
if (Objects.nonNull(metadatums)) {
for (MetadatumDTO metadatum : metadatums) {
values.add(metadatum);
}
}
}
}
return values;
}
private List<MetadatumDTO> getMetadatum(String value) {
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
if (StringUtils.isBlank(value)) {
return null;
}
String [] range = value.split("-");
if (range.length == 2) {
metadatums.add(setStartPage(range));
metadatums.add(setEndPage(range));
} else if (range.length != 0) {
metadatums.add(setStartPage(range));
}
return metadatums;
}
private MetadatumDTO setEndPage(String[] range) {
MetadatumDTO endPage = new MetadatumDTO();
endPage.setValue(range[1]);
endPage.setElement(endPageMetadata.getElement());
endPage.setQualifier(endPageMetadata.getQualifier());
endPage.setSchema(endPageMetadata.getSchema());
return endPage;
}
private MetadatumDTO setStartPage(String[] range) {
MetadatumDTO startPage = new MetadatumDTO();
startPage.setValue(range[0]);
startPage.setElement(startPageMetadata.getElement());
startPage.setQualifier(startPageMetadata.getQualifier());
startPage.setSchema(startPageMetadata.getSchema());
return startPage;
}
public MetadataFieldConfig getStartPageMetadata() {
return startPageMetadata;
}
public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) {
this.startPageMetadata = startPageMetadata;
}
public MetadataFieldConfig getEndPageMetadata() {
return endPageMetadata;
}
public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) {
this.endPageMetadata = endPageMetadata;
}
}
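A short illustration of the splitting behaviour above; the target start/end page fields are assumed to be wired in Spring and the values are examples.

    // "503-512" -> one start-page metadatum "503" and one end-page metadatum "512"
    // "27"      -> only a start-page metadatum "27"
    // ""        -> no metadata produced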


@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* This contributor replaces a specific character in the metadata value.
* It is useful for providers (e.g. Scopus) whose identifiers contain the "/" character.
* The "/" character is never encoded by the framework when building URLs; conversely, if we
* pre-encode "/" as %2F, the framework encodes it again and it becomes %252F.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor {
private char characterToBeReplaced;
private char characterToReplaceWith;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
for (String ns : prefixToNamespaceMapping.keySet()) {
List<Element> nodes = element.getChildren(query, Namespace.getNamespace(ns));
for (Element el : nodes) {
values.add(getMetadatum(field, el.getValue()));
}
}
return values;
}
private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
MetadatumDTO dcValue = new MetadatumDTO();
if (Objects.isNull(field)) {
return null;
}
dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith));
dcValue.setElement(field.getElement());
dcValue.setQualifier(field.getQualifier());
dcValue.setSchema(field.getSchema());
return dcValue;
}
public void setCharacterToBeReplaced(int characterToBeReplaced) {
this.characterToBeReplaced = (char)characterToBeReplaced;
}
public void setCharacterToReplaceWith(int characterToReplaceWith) {
this.characterToReplaceWith = (char)characterToReplaceWith;
}
}
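A minimal configuration sketch for the case described in the class comment; replacing '/' (code 47) with '-' (code 45) is an assumption chosen for illustration, the real characters are defined in the Spring configuration of the import.

    ReplaceCharacterXPathMetadataContributor identifierContributor = new ReplaceCharacterXPathMetadataContributor();
    identifierContributor.setCharacterToBeReplaced(47);  // '/', the character that breaks URL building
    identifierContributor.setCharacterToReplaceWith(45); // '-', assumed replacement character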


@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor is able to concatenate multiple values.
* Given a certain path, if it matches several nodes,
* the values of those nodes are concatenated into a single one.
* A concrete example can be seen in the file wos-responce.xml: the <abstract_text> node
* may contain several <p> paragraphs, and this contributor concatenates
* all <p> paragraphs to obtain a single value.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleConcatContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
StringBuilder text = new StringBuilder();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
if (StringUtils.isNotBlank(element.getText())) {
text.append(element.getText());
}
} else {
log.warn("node of type: " + el.getClass());
}
}
if (StringUtils.isNotBlank(text.toString())) {
values.add(metadataFieldMapping.toDCValue(field, text.toString()));
}
return values;
}
}


@@ -0,0 +1,75 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web of Science specific implementation of {@link MetadataContributor}.
* This contributor can search over multiple paths.
* For example, to populate the subject metadata, the values in the Web of Science response
* are spread across different paths,
* so this contributor allows you to collect them by configuring the paths in the paths list.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
private List<String> paths;
public SimpleMultiplePathContributor() {}
public SimpleMultiplePathContributor(List<String> paths) {
this.paths = paths;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
for (String path : this.paths) {
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText()));
} else {
log.warn("node of type: " + el.getClass());
}
}
}
return values;
}
public List<String> getPaths() {
return paths;
}
public void setPaths(List<String> paths) {
this.paths = paths;
}
}
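In DSpace this contributor is normally declared in the Spring configuration (e.g. wos-integration.xml); the sketch below only shows the programmatic shape of that wiring. The two XPath expressions are placeholders, and the target field, namespaces and field mapping inherited from SimpleXpathMetadatumContributor (not shown in this diff) would still have to be injected.

import java.util.Arrays;
import java.util.List;

import org.dspace.importer.external.metadatamapping.contributor.SimpleMultiplePathContributor;

public class MultiplePathWiringSketch {
    public static SimpleMultiplePathContributor subjectContributor() {
        // Placeholder paths: in the WoS response the subject values live under more than one element.
        List<String> paths = Arrays.asList(
                "ns:fullrecord_metadata/ns:category_info/ns:subjects/ns:subject",
                "ns:item/ns:keywords_plus/ns:keyword");
        SimpleMultiplePathContributor contributor = new SimpleMultiplePathContributor(paths);
        // Namespace prefixes, the target MetadataFieldConfig and the MetadataFieldMapping are
        // inherited from SimpleXpathMetadatumContributor and must be set (typically by Spring)
        // before contributeMetadata(...) can produce values.
        return contributor;
    }
}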

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor checks, for each node returned by the supplied path,
* whether the node contains the supplied attribute; if so, the attribute value is mapped to the configured field.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com)
*/
public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
private String attribute;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
String attributeValue = element.getAttributeValue(this.attribute);
if (StringUtils.isNotBlank(attributeValue)) {
values.add(metadataFieldMapping.toDCValue(this.field, attributeValue));
}
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
public String getAttribute() {
return attribute;
}
public void setAttribute(String attribute) {
this.attribute = attribute;
}
}
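The extraction rule, taking the attribute of each matched node rather than its text, can be previewed with plain JDOM2. Element and attribute names below are invented for the example.

import java.io.StringReader;

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;

public class AttributeExtractionDemo {
    public static void main(String[] args) throws Exception {
        String xml = "<record><identifier type=\"doi\">element text is ignored</identifier></record>";
        Document doc = new SAXBuilder().build(new StringReader(xml));
        XPathExpression<Element> xpath = XPathFactory.instance().compile("//identifier", Filters.element());
        for (Element el : xpath.evaluate(doc)) {
            // Mirrors the contributor: the metadata value is the attribute, not the element text.
            System.out.println(el.getAttributeValue("type")); // prints "doi"
        }
    }
}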

View File

@@ -34,10 +34,10 @@ import org.springframework.beans.factory.annotation.Autowired;
*/ */
public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> { public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
protected MetadataFieldConfig field;
/** /**
* Return prefixToNamespaceMapping * Return prefixToNamespaceMapping
* *
@@ -171,4 +171,5 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
} }
return values; return values;
} }
} }

View File

@@ -0,0 +1,160 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web Of Science specific implementation of {@link MetadataContributor}
* This contributor checks, for each node returned by the given path, whether the node contains "this.attribute"
* and whether the attribute value is one of the keys configured
* in the "this.attributeValue2metadata" map; if so, the value of the current node is taken.
* If "this.firstChild" is true, the value of the child element named "this.childName" is taken instead.
* The mapping and configuration of this class can be found in the wos-integration.xml file.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosAttribute2ValueContributor implements MetadataContributor<Element> {
private final static Logger log = LogManager.getLogger();
private String query;
private String attribute;
private boolean firstChild;
private String childName;
private Map<String, String> prefixToNamespaceMapping;
private Map<String, MetadataFieldConfig> attributeValue2metadata;
private MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
public WosAttribute2ValueContributor() {}
public WosAttribute2ValueContributor(String query,
Map<String, String> prefixToNamespaceMapping,
Map<String, MetadataFieldConfig> attributeValue2metadata) {
this.query = query;
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
this.attributeValue2metadata = attributeValue2metadata;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
String attributeValue = element.getAttributeValue(this.attribute);
setField(attributeValue, element, values);
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
private void setField(String attributeValue, Element el, List<MetadatumDTO> values) {
for (String id : attributeValue2metadata.keySet()) {
if (StringUtils.equals(id, attributeValue)) {
if (this.firstChild) {
String value = el.getChild(this.childName).getValue();
values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), value));
} else {
values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), el.getText()));
}
}
}
}
public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
return metadataFieldMapping;
}
public void setMetadataFieldMapping(
MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping;
}
@Resource(name = "isiFullprefixMapping")
public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
}
public Map<String, String> getPrefixToNamespaceMapping() {
return prefixToNamespaceMapping;
}
public String getAttribute() {
return attribute;
}
public void setAttribute(String attribute) {
this.attribute = attribute;
}
public Map<String, MetadataFieldConfig> getAttributeValue2metadata() {
return attributeValue2metadata;
}
public void setAttributeValue2metadata(Map<String, MetadataFieldConfig> attributeValue2metadata) {
this.attributeValue2metadata = attributeValue2metadata;
}
public String getQuery() {
return query;
}
public void setQuery(String query) {
this.query = query;
}
public boolean isFirstChild() {
return firstChild;
}
public void setFirstChild(boolean firstChild) {
this.firstChild = firstChild;
}
public String getChildName() {
return childName;
}
public void setChildName(String childName) {
this.childName = childName;
}
}
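A hedged sketch of how this contributor might be configured programmatically; in practice the wiring lives in wos-integration.xml. The XPath, namespace URI/prefix, attribute name and map keys are illustrative only, and MetadataFieldConfig is assumed to offer a single-string "schema.element.qualifier" constructor.

import java.util.HashMap;
import java.util.Map;

import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.WosAttribute2ValueContributor;

public class WosAttributeWiringSketch {
    public static WosAttribute2ValueContributor identifierContributor() {
        // Key = namespace URI, value = prefix (this is how the contributor reads the map).
        Map<String, String> prefixToNamespaceMapping = new HashMap<>();
        prefixToNamespaceMapping.put("http://example.org/wos", "ns"); // placeholder URI

        // Attribute value -> target metadata field: only nodes whose attribute matches a key are mapped.
        Map<String, MetadataFieldConfig> attributeValue2metadata = new HashMap<>();
        attributeValue2metadata.put("doi", new MetadataFieldConfig("dc.identifier.doi"));
        attributeValue2metadata.put("issn", new MetadataFieldConfig("dc.identifier.issn"));

        WosAttribute2ValueContributor contributor = new WosAttribute2ValueContributor(
                "ns:identifiers/ns:identifier", prefixToNamespaceMapping, attributeValue2metadata);
        contributor.setAttribute("type");
        contributor.setFirstChild(false);
        // A MetadataFieldMapping must still be injected via setMetadataFieldMapping(...)
        // before contributeMetadata(...) is usable.
        return contributor;
    }
}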

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor can retrieve the identifiers
* configured in "this.identifire2field" from the Web of Science response.
* The mapping and configuration of this class can be found in the following wos-integration.xml file.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosIdentifierContributor extends SimpleXpathMetadatumContributor {
protected Map<String, MetadataFieldConfig> identifier2field;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Element> xpath =
XPathFactory.instance().compile(query, Filters.element(), null, namespaces);
List<Element> nodes = xpath.evaluate(element);
for (Element el : nodes) {
String type = el.getAttributeValue("type");
setIdentyfier(type, el, values);
}
return values;
}
private void setIdentyfier(String type, Element el, List<MetadatumDTO> values) {
for (String id : identifier2field.keySet()) {
if (StringUtils.equals(id, type)) {
String value = el.getAttributeValue("value");
values.add(metadataFieldMapping.toDCValue(identifier2field.get(id), value));
}
}
}
public Map<String, MetadataFieldConfig> getIdentifier2field() {
return identifier2field;
}
public void setIdentifier2field(Map<String, MetadataFieldConfig> identifier2field) {
this.identifier2field = identifier2field;
}
}
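The identifier2field map drives which identifiers are extracted. A short sketch, with illustrative keys and the same MetadataFieldConfig assumption as above:

import java.util.HashMap;
import java.util.Map;

import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.WosIdentifierContributor;

public class WosIdentifierWiringSketch {
    public static WosIdentifierContributor identifierContributor() {
        WosIdentifierContributor contributor = new WosIdentifierContributor();
        // "type" attribute value -> target metadata field; the keys here are illustrative.
        Map<String, MetadataFieldConfig> identifier2field = new HashMap<>();
        identifier2field.put("doi", new MetadataFieldConfig("dc.identifier.doi"));
        identifier2field.put("issn", new MetadataFieldConfig("dc.identifier.issn"));
        contributor.setIdentifier2field(identifier2field);
        // The XPath query, namespaces and metadata field mapping come from the parent
        // SimpleXpathMetadatumContributor and are configured separately (e.g. in Spring).
        return contributor;
    }
}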

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web Of Science specific implementation of {@link MetadataContributor}
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = ((Element) el).getChild("name");
if (Objects.nonNull(element)) {
String type = element.getAttributeValue("role");
setIdentyfier(type, element, values);
}
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
private void setIdentyfier(String type, Element el, List<MetadatumDTO> values) {
if (StringUtils.equals("researcher_id", type)) {
String value = el.getAttributeValue("r_id");
if (StringUtils.isNotBlank(value)) {
values.add(metadataFieldMapping.toDCValue(this.field, value));
}
}
}
}
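The element shape this contributor expects can be reproduced with a standalone JDOM2 fragment; only the child name "name" and the attributes "role" and "r_id" come from the code above, the wrapper element and values are invented.

import java.io.StringReader;

import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;

public class ResearcherIdDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical wrapper; the contributor inspects the <name> child, its "role" and "r_id" attributes.
        String xml = "<contributor><name role=\"researcher_id\" r_id=\"A-1234-2020\">Doe, J.</name></contributor>";
        Element root = new SAXBuilder().build(new StringReader(xml)).getRootElement();
        Element name = root.getChild("name");
        if (name != null && "researcher_id".equals(name.getAttributeValue("role"))) {
            System.out.println(name.getAttributeValue("r_id")); // prints "A-1234-2020"
        }
    }
}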

View File

@@ -14,23 +14,23 @@ import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader; import java.io.StringReader;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.google.common.io.CharStreams; import com.google.common.io.CharStreams;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.FileMultipleOccurencesException; import org.dspace.importer.external.exception.FileMultipleOccurencesException;
import org.dspace.importer.external.exception.FileSourceException; import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.FileSource;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
@@ -41,6 +41,7 @@ import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression; import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory; import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Implements a data source for querying PubMed Central * Implements a data source for querying PubMed Central
@@ -51,13 +52,16 @@ import org.jdom2.xpath.XPathFactory;
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element> public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource, FileSource { implements QuerySource, FileSource {
private String baseAddress; private String urlFetch;
private String urlSearch;
// it is protected so that subclass can mock it for testing private int attempt = 3;
protected WebTarget pubmedWebTarget;
private List<String> supportedExtensions; private List<String> supportedExtensions;
@Autowired
private LiveImportClient liveImportClient;
/** /**
* Set the file extensions supported by this metadata service * Set the file extensions supported by this metadata service
* *
@@ -187,29 +191,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
* @throws Exception on generic exception * @throws Exception on generic exception
*/ */
@Override @Override
public void init() throws Exception { public void init() throws Exception {}
Client client = ClientBuilder.newClient();
WebTarget webTarget = client.target(baseAddress);
pubmedWebTarget = webTarget.queryParam("db", "pubmed");
}
/**
* Return the baseAddress set to this object
*
* @return The String object that represents the baseAddress of this object
*/
public String getBaseAddress() {
return baseAddress;
}
/**
* Set the baseAddress to this object
*
* @param baseAddress The String object that represents the baseAddress of this object
*/
public void setBaseAddress(String baseAddress) {
this.baseAddress = baseAddress;
}
private class GetNbRecords implements Callable<Integer> { private class GetNbRecords implements Callable<Integer> {
@@ -226,23 +208,26 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Integer call() throws Exception { public Integer call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("query", String.class)); uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
Response response = invocationBuilder.get(); countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
String responseString = response.readEntity(String.class);
String count = getSingleElementValue(responseString, "Count");
return Integer.parseInt(count);
}
} }
if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
return Integer.parseInt(getSingleElementValue(response, "Count"));
}
}
private String getSingleElementValue(String src, String elementName) { private String getSingleElementValue(String src, String elementName) {
String value = null; String value = null;
@@ -286,41 +271,61 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
Integer start = query.getParameterAsClass("start", Integer.class); Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class); Integer count = query.getParameterAsClass("count", Integer.class);
if (count == null || count < 0) { if (Objects.isNull(count) || count < 0) {
count = 10; count = 10;
} }
if (start == null || start < 0) { if (Objects.isNull(start) || start < 0) {
start = 0; start = 0;
} }
List<ImportRecord> records = new LinkedList<ImportRecord>(); List<ImportRecord> records = new LinkedList<ImportRecord>();
WebTarget getRecordIdsTarget = pubmedWebTarget.queryParam("term", queryString); URIBuilder uriBuilder = new URIBuilder(urlSearch);
getRecordIdsTarget = getRecordIdsTarget.queryParam("retstart", start); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget.queryParam("retmax", count); uriBuilder.addParameter("retstart", start.toString());
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("retmax", count.toString());
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); uriBuilder.addParameter("usehistory", "y");
uriBuilder.addParameter("term", queryString);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
Response response = invocationBuilder.get(); String queryKey = getSingleElementValue(response, "QueryKey");
String responseString = response.readEntity(String.class); String webEnv = getSingleElementValue(response, "WebEnv");
String queryKey = getSingleElementValue(responseString, "QueryKey"); URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
String webEnv = getSingleElementValue(responseString, "WebEnv"); uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retstart", start.toString());
uriBuilder2.addParameter("retmax", count.toString());
uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
uriBuilder2.addParameter("retmode", "xml");
Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); if (StringUtils.isBlank(response2)) {
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); throw new RuntimeException("After " + attempt
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); + " attempts to contact the PubMed service, a correct answer could not be received."
getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); + " The request was made with this URL:" + uriBuilder2.toString());
getRecordsTarget = getRecordsTarget.queryParam("retmax", count); }
getRecordsTarget = getRecordsTarget.queryParam("retstart", start);
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); List<Element> elements = splitToRecords(response2);
response = invocationBuilder.get();
List<Element> elements = splitToRecords(response.readEntity(String.class));
for (Element record : elements) { for (Element record : elements) {
records.add(transformSourceRecords(record)); records.add(transformSourceRecords(record));
@@ -361,23 +366,29 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public ImportRecord call() throws Exception { public ImportRecord call() throws Exception {
String id = query.getParameterAsClass("id", String.class);
WebTarget getRecordTarget = pubmedWebTarget.queryParam("id", id); URIBuilder uriBuilder = new URIBuilder(urlFetch);
getRecordTarget = getRecordTarget.queryParam("retmode", "xml"); uriBuilder.addParameter("db", "pubmed");
getRecordTarget = getRecordTarget.path("efetch.fcgi"); uriBuilder.addParameter("retmode", "xml");
uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
Invocation.Builder invocationBuilder = getRecordTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
Response response = invocationBuilder.get(); int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
List<Element> elements = splitToRecords(response.readEntity(String.class)); countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
if (elements.isEmpty()) {
return null;
} }
return transformSourceRecords(elements.get(0)); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
List<Element> elements = splitToRecords(response);
return elements.isEmpty() ? null : transformSourceRecords(elements.get(0));
} }
} }
@@ -396,40 +407,57 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Collection<ImportRecord> call() throws Exception { public Collection<ImportRecord> call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("term", String.class)); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget uriBuilder.addParameter("usehistory", "y");
.queryParam("field", query.getParameterAsClass("field", String.class)); uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("field", query.getParameterAsClass("field", String.class));
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi");
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
Response response = invocationBuilder.get(); int countAttempt = 0;
String responseString = response.readEntity(String.class); while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
String queryKey = getSingleElementValue(responseString, "QueryKey"); response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
String webEnv = getSingleElementValue(responseString, "WebEnv");
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv);
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey);
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml");
getRecordsTarget = getRecordsTarget.path("efetch.fcgi");
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE);
response = invocationBuilder.get();
String xml = response.readEntity(String.class);
return parseXMLString(xml);
}
} }
if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
String webEnv = getSingleElementValue(response, "WebEnv");
String queryKey = getSingleElementValue(response, "QueryKey");
URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retmode", "xml");
uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
if (StringUtils.isBlank(response2)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder2.toString());
}
return parseXMLString(response2);
}
}
@Override @Override
public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException { public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException {
String xml = null;
try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) {
xml = CharStreams.toString(reader); String xml = CharStreams.toString(reader);
return parseXMLString(xml); return parseXMLString(xml);
} catch (IOException e) { } catch (IOException e) {
throw new FileSourceException ("Cannot read XML from InputStream", e); throw new FileSourceException ("Cannot read XML from InputStream", e);
@@ -456,4 +484,21 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
} }
return records; return records;
} }
public String getUrlFetch() {
return urlFetch;
}
public void setUrlFetch(String urlFetch) {
this.urlFetch = urlFetch;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
} }
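The hunks above replace the JAX-RS WebTarget calls with URIBuilder plus LiveImportClient and a bounded retry loop. The sketch below condenses that pattern into one helper; the endpoint and timeout are placeholders, and the LiveImportClient is assumed to be injected by Spring exactly as in the class above.

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;

public class RetryingGetSketch {
    private final LiveImportClient liveImportClient;
    private final int attempt = 3;

    public RetryingGetSketch(LiveImportClient liveImportClient) {
        this.liveImportClient = liveImportClient;
    }

    /** Same shape as the loops added above: retry until a non-blank body or the attempts run out. */
    public String get(String urlSearch, String term) throws Exception {
        URIBuilder uriBuilder = new URIBuilder(urlSearch); // e.g. the configured esearch endpoint
        uriBuilder.addParameter("db", "pubmed");
        uriBuilder.addParameter("term", term);
        Map<String, Map<String, String>> params = new HashMap<>();
        String response = StringUtils.EMPTY;
        int countAttempt = 0;
        while (StringUtils.isBlank(response) && countAttempt <= attempt) {
            countAttempt++;
            response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
        }
        if (StringUtils.isBlank(response)) {
            throw new RuntimeException("No usable answer after " + attempt + " attempts: " + uriBuilder);
        }
        return response;
    }
}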

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the PubmedEurope metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "pubmedEuropeMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,419 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
/**
* Implements a data source for querying PubMed Europe
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
@Autowired
private LiveImportClient liveImportClient;
@Override
public String getImportSource() {
return "pubmedeu";
}
/**
* Get a single record from the PubMed Europe.
*
* @param id Identifier for the record
* @return The first matching record
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
/**
* Find the number of records matching a query;
*
* @param query a query string to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Find the number of records matching a query;
*
* @param query A query string to base the search on.
* @return The sum of the matching records over this import source
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
/**
* Find records based on a object query.
*
* @param query A query object to base the search on.
* @return A set of records. Fully transformed.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
/**
* Get a single record from the PubMed Europe.
*
* @param query A query matching a single record
* @return The first matching record
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
/**
* Finds records based on query object.
*
* @param query A query object to base the search on.
* @return A collection of import records.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for PubMed Europe");
}
@Override
public void init() throws Exception {}
public List<ImportRecord> getByPubmedEuropeID(String pubmedID, Integer start, Integer size)
throws IOException, HttpException {
String query = "(EXT_ID:" + pubmedID + ")";
return search(query, size < 1 ? 1 : size, start);
}
/**
* This class is a Callable implementation to get PubMed Europe entries based on
* query object.
*
* This Callable uses as query value the string queryString passed to the constructor.
* If the object is constructed from a Query instance, the Query's map entry with key "query" is used.
* Pagination is supported too, using the values of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String queryString = query.getParameterAsClass("query", String.class);
return search(queryString, count, start);
}
}
/**
* This class is a Callable implementation to get a PubMed Europe entry using the PubMed Europe ID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 ,0);
}
}
/**
* This class is a Callable implementation to search PubMed Europe entries
* using author, title and year.
* Pagination is supported too, using the value of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
String title = query.getParameterAsClass("title", String.class);
String author = query.getParameterAsClass("author", String.class);
Integer year = query.getParameterAsClass("year", Integer.class);
Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
return search(title, author, year, maxResult, start);
}
}
/**
* This class is a Callable implementation to count the number
* of entries for a PubMed Europe query.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
try {
return count(query.getParameterAsClass("query", String.class));
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
/**
* Returns the total number of PubMed Europe publications returned by a specific query
*
* @param query A keyword or combination of keywords to be searched
* @throws URISyntaxException If URI syntax error
* @throws ClientProtocolException The client protocol exception
* @throws IOException If IO error
* @throws JaxenException If Xpath evaluation failed
*/
public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException {
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
Element element = root.getChild("hitCount");
return Integer.parseInt(element.getValue());
} catch (JDOMException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
public List<ImportRecord> search(String title, String author, int year, int count, int start)
throws IOException {
StringBuffer query = new StringBuffer();
query.append("(");
if (StringUtils.isNotBlank(title)) {
query.append("TITLE:").append(title);
query.append(")");
}
if (StringUtils.isNotBlank(author)) {
// Search for a surname and (optionally) initial(s) in publication author lists
// AUTH:einstein, AUTH:”Smith AB”
String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
String[] authors = author.split(splitRegex);
if (query.length() > 0) {
query.append(" AND ");
}
query.append("(");
int countAuthors = 0;
for (String auth : authors) {
countAuthors++;
query.append("AUTH:\"").append(auth).append("\"");
if (countAuthors < authors.length) {
query.append(" AND ");
}
}
query.append(")");
}
if (year != -1) {
if (query.length() > 0) {
query.append(" AND ");
}
query.append("( PUB_YEAR:").append(year).append(")");
}
query.append(")");
return search(query.toString(), count, start);
}
/**
* Returns a list of PubMed Europe publication records
*
* @param query A keyword or combination of keywords to be searched
* @param size The number of search results per page
* @param start Start number for the acquired search result list
* @throws IOException If IO error
*/
public List<ImportRecord> search(String query, Integer size, Integer start) throws IOException {
List<ImportRecord> results = new ArrayList<>();
try {
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("format", "xml");
uriBuilder.addParameter("resulttype", "core");
uriBuilder.addParameter("pageSize", String.valueOf(size));
uriBuilder.addParameter("query", query);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
boolean lastPage = false;
int skipped = 0;
while (!lastPage && results.size() < size) {
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
String cursorMark = StringUtils.EMPTY;
if (StringUtils.isNotBlank(response)) {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
XPathFactory xpfac = XPathFactory.instance();
XPathExpression<Element> xPath = xpfac.compile("//responseWrapper/resultList/result",
Filters.element());
List<Element> records = xPath.evaluate(document);
if (records.size() > 0) {
for (Element item : records) {
if (start > skipped) {
skipped++;
} else {
results.add(transformSourceRecords(item));
}
}
} else {
lastPage = true;
break;
}
Element root = document.getRootElement();
Element nextCursorMark = root.getChild("nextCursorMark");
cursorMark = Objects.nonNull(nextCursorMark) ? nextCursorMark.getValue() : StringUtils.EMPTY;
}
if (StringUtils.isNotBlank(cursorMark)) {
uriBuilder.setParameter("cursorMar", cursorMark);
} else {
lastPage = true;
}
}
} catch (URISyntaxException | JDOMException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
return results;
}
private String buildURI(Integer pageSize, String query) throws URISyntaxException {
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("format", "xml");
uriBuilder.addParameter("resulttype", "core");
uriBuilder.addParameter("pageSize", String.valueOf(pageSize));
uriBuilder.addParameter("query", query);
return uriBuilder.toString();
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
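As a QuerySource the service is driven by plain query strings once wired. A rough usage sketch; in a running DSpace the instance, its URL and its LiveImportClient come from the Spring context, and the Europe PMC endpoint mentioned in the comment is only assumed.

import java.util.Collection;

import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.pubmedeurope.PubmedEuropeMetadataSourceServiceImpl;

public class PubmedEuropeUsageSketch {
    // The service is expected to be fully wired by Spring (a URL such as the Europe PMC
    // "/webservices/rest/search" endpoint, a LiveImportClient, and the metadata field mapping).
    public static Collection<ImportRecord> firstPage(PubmedEuropeMetadataSourceServiceImpl service)
            throws Exception {
        // First 10 records for a keyword query; pagination uses start/count as in getRecords(...).
        return service.getRecords("TITLE:\"open access\"", 0, 10);
    }
}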

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class ScopusFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "scopusMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Scopus
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com)
*/
public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private int timeout = 1000;
int itemPerPage = 25;
private String url;
private String apiKey;
private String instKey;
private String viewMode;
@Autowired
private LiveImportClient liveImportClient;
public LiveImportClient getLiveImportClient() {
return liveImportClient;
}
public void setLiveImportClient(LiveImportClient liveImportClient) {
this.liveImportClient = liveImportClient;
}
@Override
public void init() throws Exception {}
/**
* The string that identifies this import implementation. Preferably a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "scopus";
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query)).size();
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString())).size();
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start,
int count) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query));
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = null;
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
if (isEID(query.toString())) {
records = retry(new FindByIdCallable(query.toString()));
} else {
records = retry(new SearchByQueryCallable(query));
}
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item)
throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scopus");
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new FindByQueryCallable(query));
}
private boolean isEID(String query) {
Pattern pattern = Pattern.compile("2-s2\\.0-\\d+");
Matcher match = pattern.matcher(query);
if (match.matches()) {
return true;
}
return false;
}
/**
* This class implements a callable to get the numbers of result
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
if (StringUtils.isNotBlank(apiKey)) {
// Execute the request.
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(query, null, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath = XPathFactory.instance()
.compile("opensearch:totalResults", Filters.element(), null, namespaces);
Element count = xpath.evaluateFirst(root);
try {
return Integer.parseInt(count.getText());
} catch (NumberFormatException e) {
return null;
}
}
return null;
}
}
/**
* This class is a Callable implementation to get a Scopus entry using EID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String eid;
private FindByIdCallable(String eid) {
this.eid = eid;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "EID(" + eid.replace("!", "/") + ")";
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* This class implements a callable to get the items based on query parameters
*/
private class FindByQueryCallable implements Callable<List<ImportRecord>> {
private String title;
private String author;
private Integer year;
private Integer start;
private Integer count;
private FindByQueryCallable(Query query) {
this.title = query.getParameterAsClass("title", String.class);
this.year = query.getParameterAsClass("year", Integer.class);
this.author = query.getParameterAsClass("author", String.class);
this.start = query.getParameterAsClass("start", Integer.class) != null ?
query.getParameterAsClass("start", Integer.class) : 0;
this.count = query.getParameterAsClass("count", Integer.class) != null ?
query.getParameterAsClass("count", Integer.class) : 20;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "";
StringBuffer query = new StringBuffer();
if (StringUtils.isNotBlank(title)) {
query.append("title(").append(title).append("");
}
if (StringUtils.isNotBlank(author)) {
// [FAU]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("AUTH(").append(author).append(")");
}
if (Objects.nonNull(year) && year != -1) {
// [DP]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("PUBYEAR IS ").append(year);
}
queryString = query.toString();
if (apiKey != null && !apiKey.equals("")) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = query.getParameterAsClass("query", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
private Map<String, String> getRequestParameters(String query, String viewMode, Integer start, Integer count) {
Map<String, String> params = new HashMap<String, String>();
params.put("httpAccept", "application/xml");
params.put("apiKey", apiKey);
params.put("query", query);
if (StringUtils.isNotBlank(instKey)) {
params.put("insttoken", instKey);
}
if (StringUtils.isNotBlank(viewMode)) {
params.put("view", viewMode);
}
params.put("start", (Objects.nonNull(start) ? start + "" : "0"));
params.put("count", (Objects.nonNull(count) ? count + "" : "20"));
return params;
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
List<Element> records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom"));
return records;
} catch (JDOMException | IOException e) {
return new ArrayList<Element>();
}
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getViewMode() {
return viewMode;
}
public void setViewMode(String viewMode) {
this.viewMode = viewMode;
}
public String getApiKey() {
return apiKey;
}
public String getInstKey() {
return instKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
public void setInstKey(String instKey) {
this.instKey = instKey;
}
}
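The EID detection above is a plain regular expression; this standalone snippet shows which query strings get routed to FindByIdCallable instead of a search. The example EID value is made up.

import java.util.regex.Pattern;

public class EidCheckDemo {
    // Same pattern as ScopusImportMetadataSourceServiceImpl.isEID(...)
    private static final Pattern EID = Pattern.compile("2-s2\\.0-\\d+");

    public static void main(String[] args) {
        System.out.println(EID.matcher("2-s2.0-85123456789").matches()); // true  -> lookup by EID
        System.out.println(EID.matcher("cancer AND genomics").matches()); // false -> regular search
    }
}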

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Web of Science metadatum fields on the DSpace metadatum fields
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class WOSFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which incoming metadatum is mapped onto which DSpace metadatum. Note that while the key must be
* unique, it only matters here for postprocessing of the value. The mapped MetadatumContributor has full
* control over which metadata field is generated.
*
* @param metadataFieldMap The map containing the link between the retrieved metadata
* and the metadata that will be set on the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "wosMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,329 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Web of Science.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private static final String AI_PATTERN = "^AI=(.*)";
private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$");
private int timeout = 1000;
private String url;
private String urlSearch;
private String apiKey;
@Autowired
private LiveImportClient liveImportClient;
@Override
public void init() throws Exception {}
/**
* The string that identifies this import implementation. Preferably a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "wos";
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByQueryCallable(query));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
/**
* This class implements a callable to get the number of results
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
if (StringUtils.isNotBlank(apiKey)) {
String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8);
String url = urlSearch + queryString + "&count=1&firstRecord=1";
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
XPathExpression<Element> xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]",
Filters.element(), null);
Element tot = xpath.evaluateFirst(root);
return Integer.valueOf(tot.getValue());
}
return null;
}
}
/**
* This class is a Callable implementation to get a Web of Science entry using a DOI
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String doi;
private FindByIdCallable(String doi) {
this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8);
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
if (StringUtils.isNotBlank(apiKey)) {
String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1";
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A list of fully transformed records.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = checkQuery(query.getParameterAsClass("query", String.class));
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8)
+ "&count=" + count + "&firstRecord=" + (start + 1);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> omElements = splitToRecords(response);
for (Element el : omElements) {
results.add(transformSourceRecords(el));
}
}
return results;
}
}
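// Illustrative note (not part of the original patch): the WOS endpoint uses a 1-based firstRecord, so the
// 0-based "start" above is mapped to firstRecord = start + 1; with start = 0 and count = 20 the request
// asks for records 1-20 of the result set.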
private Map<String, String> getRequestParameters() {
Map<String, String> params = new HashMap<String, String>();
params.put("Accept", "application/xml");
params.put("X-ApiKey", this.apiKey);
return params;
}
/**
* This method checks whether the query contains
* an "AI=(...)" Author Identifier, a DOI ("DO=(query)")
* or an Accession Number ("UT=(query)").
* Otherwise the value is wrapped in a TS=(query) tag,
* which searches for topic terms in the following fields of a document:
* Title, Abstract, Author Keywords, Keywords Plus.
*
* @param query the raw query string
*/
private String checkQuery(String query) {
Pattern risPattern = Pattern.compile(AI_PATTERN);
Matcher risMatcher = risPattern.matcher(query.trim());
if (risMatcher.matches()) {
return query;
}
if (DoiCheck.isDoi(query)) {
// FIXME: workaround to be removed once fixed by the community the double post of query param
if (query.startsWith(",")) {
query = query.substring(1);
}
return "DO=(" + query + ")";
} else if (isIsi(query)) {
return "UT=(" + query + ")";
}
StringBuilder queryBuilder = new StringBuilder("TS=(");
queryBuilder.append(query).append(")");
return queryBuilder.toString();
}
private boolean isIsi(String query) {
if (query.startsWith("WOS:")) {
return true;
}
Matcher matcher = ISI_PATTERN.matcher(query.trim());
return matcher.matches();
}
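// Illustrative sketch (not part of the original patch) of how checkQuery/isIsi classify input;
// the example values are hypothetical:
//   checkQuery("AI=(A-1234-2010)")    -> "AI=(A-1234-2010)"        (author identifier, passed through)
//   checkQuery("10.1000/xyz123")      -> "DO=(10.1000/xyz123)"     (assuming DoiCheck.isDoi accepts it)
//   checkQuery("WOS:000000000000001") -> "UT=(WOS:000000000000001)" (accession number, via isIsi)
//   checkQuery("000000000000001")     -> "UT=(000000000000001)"     (15-digit accession number)
//   checkQuery("climate change")      -> "TS=(climate change)"      (topic search fallback)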
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]",
Filters.element(), null).evaluate(root).get(0).getValue().trim();
Document intDocument = saxBuilder.build(new StringReader(cData));
XPathExpression<Element> xPath = XPathFactory.instance().compile("*", Filters.element(), null);
List<Element> records = xPath.evaluate(intDocument.getRootElement());
if (CollectionUtils.isNotEmpty(records)) {
return records;
}
} catch (JDOMException | IOException e) {
log.error(e.getMessage());
return new ArrayList<Element>();
}
return new ArrayList<Element>();
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
public String getApiKey() {
return apiKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
}

View File

@@ -10,6 +10,7 @@ package org.dspace.scripts;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.EnumType; import javax.persistence.EnumType;
@@ -33,6 +34,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.ProcessStatus; import org.dspace.content.ProcessStatus;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/** /**
* This class is the DB Entity representation of the Process object to be stored in the Database * This class is the DB Entity representation of the Process object to be stored in the Database
@@ -77,6 +79,17 @@ public class Process implements ReloadableEntity<Integer> {
) )
private List<Bitstream> bitstreams; private List<Bitstream> bitstreams;
/*
* Special Groups associated with this Process
*/
@ManyToMany(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST})
@JoinTable(
name = "process2group",
joinColumns = {@JoinColumn(name = "process_id")},
inverseJoinColumns = {@JoinColumn(name = "group_id")}
)
private List<Group> groups;
@Column(name = "creation_time", nullable = false) @Column(name = "creation_time", nullable = false)
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
private Date creationTime; private Date creationTime;
@@ -211,6 +224,21 @@ public class Process implements ReloadableEntity<Integer> {
return creationTime; return creationTime;
} }
/**
* This method returns the special groups associated with the Process.
* @return The special groups of this process.
*/
public List<Group> getGroups() {
return groups;
}
/**
* This method sets the special groups associated with the Process.
*/
public void setGroups(List<Group> groups) {
this.groups = groups;
}
/** /**
* Return <code>true</code> if <code>other</code> is the same Process * Return <code>true</code> if <code>other</code> is the same Process
* as this object, <code>false</code> otherwise * as this object, <code>false</code> otherwise

View File

@@ -21,6 +21,7 @@ import java.util.Comparator;
import java.util.Date; import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -43,6 +44,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.service.ProcessService; import org.dspace.scripts.service.ProcessService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -74,13 +76,21 @@ public class ProcessServiceImpl implements ProcessService {
@Override @Override
public Process create(Context context, EPerson ePerson, String scriptName, public Process create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) throws SQLException { List<DSpaceCommandLineParameter> parameters,
final Set<Group> specialGroups) throws SQLException {
Process process = new Process(); Process process = new Process();
process.setEPerson(ePerson); process.setEPerson(ePerson);
process.setName(scriptName); process.setName(scriptName);
process.setParameters(DSpaceCommandLineParameter.concatenate(parameters)); process.setParameters(DSpaceCommandLineParameter.concatenate(parameters));
process.setCreationTime(new Date()); process.setCreationTime(new Date());
Optional.ofNullable(specialGroups)
.ifPresent(sg -> {
// we use a set to be sure no duplicated special groups are stored with process
Set<Group> specialGroupsSet = new HashSet<>(sg);
process.setGroups(new ArrayList<>(specialGroupsSet));
});
Process createdProcess = processDAO.create(context, process); Process createdProcess = processDAO.create(context, process);
log.info(LogHelper.getHeader(context, "process_create", log.info(LogHelper.getHeader(context, "process_create",
"Process has been created for eperson with email " + ePerson.getEmail() "Process has been created for eperson with email " + ePerson.getEmail()

View File

@@ -10,7 +10,9 @@ package org.dspace.scripts.handler;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -114,4 +116,12 @@ public interface DSpaceRunnableHandler {
*/ */
public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) public void writeFilestream(Context context, String fileName, InputStream inputStream, String type)
throws IOException, SQLException, AuthorizeException; throws IOException, SQLException, AuthorizeException;
/**
* This method returns the UUIDs of the special groups associated with the process,
* for implementations of this interface that are linked to a persisted Process.
* Other implementations return an empty list.
* @return List containing UUIDs of Special Groups of the associated Process.
*/
public List<UUID> getSpecialGroups();
} }

View File

@@ -10,7 +10,10 @@ package org.dspace.scripts.handler.impl;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
@@ -113,4 +116,9 @@ public class CommandLineDSpaceRunnableHandler implements DSpaceRunnableHandler {
File file = new File(fileName); File file = new File(fileName);
FileUtils.copyInputStreamToFile(inputStream, file); FileUtils.copyInputStreamToFile(inputStream, file);
} }
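// Command-line executions have no application (HTTP) context, so presumably no special groups apply here;
// a handler tied to a persisted Process would be expected to return the group UUIDs stored for that process.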
@Override
public List<UUID> getSpecialGroups() {
return Collections.emptyList();
}
} }

View File

@@ -11,11 +11,13 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessLogLevel; import org.dspace.scripts.ProcessLogLevel;
@@ -32,11 +34,14 @@ public interface ProcessService {
* @param ePerson The ePerson for which this process will be created on * @param ePerson The ePerson for which this process will be created on
* @param scriptName The script name to be used for the process * @param scriptName The script name to be used for the process
* @param parameters The parameters to be used for the process * @param parameters The parameters to be used for the process
* @param specialGroups Special groups to associate with the process at creation time, taken from the
* application context, in addition to the ones derived from the eperson's group membership.
* @return The created process * @return The created process
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
public Process create(Context context, EPerson ePerson, String scriptName, public Process create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) throws SQLException; List<DSpaceCommandLineParameter> parameters,
final Set<Group> specialGroups) throws SQLException;
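// Hedged usage sketch (not part of the original patch; the script name and group are hypothetical):
//   Set<Group> specialGroups = new HashSet<>();
//   specialGroups.add(contextGroup); // a group resolved from the application context
//   Process process = processService.create(context, ePerson, "my-script", parameters, specialGroups);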
/** /**
* This method will retrieve a Process object from the Database with the given ID * This method will retrieve a Process object from the Database with the given ID

View File

@@ -21,6 +21,7 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema; import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue; import org.dspace.content.MetadataValue;
import org.dspace.content.RelationshipMetadataValue;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService; import org.dspace.content.service.BundleService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -55,14 +56,24 @@ public abstract class AbstractVersionProvider {
MetadataSchema metadataSchema = metadataField.getMetadataSchema(); MetadataSchema metadataSchema = metadataField.getMetadataSchema();
String unqualifiedMetadataField = metadataSchema.getName() + "." + metadataField.getElement(); String unqualifiedMetadataField = metadataSchema.getName() + "." + metadataField.getElement();
if (getIgnoredMetadataFields().contains(metadataField.toString('.')) || if (getIgnoredMetadataFields().contains(metadataField.toString('.')) ||
getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY)) { getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY) ||
//Skip this metadata field aMd instanceof RelationshipMetadataValue) {
//Skip this metadata field (ignored and/or virtual)
continue; continue;
} }
itemService itemService.addMetadata(
.addMetadata(context, itemNew, metadataField, aMd.getLanguage(), aMd.getValue(), aMd.getAuthority(), context,
aMd.getConfidence()); itemNew,
metadataField.getMetadataSchema().getName(),
metadataField.getElement(),
metadataField.getQualifier(),
aMd.getLanguage(),
aMd.getValue(),
aMd.getAuthority(),
aMd.getConfidence(),
aMd.getPlace()
);
} }
} }

View File

@@ -15,7 +15,9 @@ import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.WorkspaceItemService; import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.identifier.IdentifierException; import org.dspace.identifier.IdentifierException;
@@ -44,6 +46,8 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
protected VersioningService versioningService; protected VersioningService versioningService;
@Autowired(required = true) @Autowired(required = true)
protected IdentifierService identifierService; protected IdentifierService identifierService;
@Autowired(required = true)
protected RelationshipService relationshipService;
@Override @Override
public Item createNewItemAndAddItInWorkspace(Context context, Item nativeItem) { public Item createNewItemAndAddItInWorkspace(Context context, Item nativeItem) {
@@ -89,10 +93,18 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
} }
} }
/**
* Copy all data (minus a few exceptions) from the old item to the new item.
* @param c the DSpace context.
* @param itemNew the new version of the item.
* @param previousItem the old version of the item.
* @return the new version of the item, with data from the old item.
*/
@Override @Override
public Item updateItemState(Context c, Item itemNew, Item previousItem) { public Item updateItemState(Context c, Item itemNew, Item previousItem) {
try { try {
copyMetadata(c, itemNew, previousItem); copyMetadata(c, itemNew, previousItem);
copyRelationships(c, itemNew, previousItem);
createBundlesAndAddBitstreams(c, itemNew, previousItem); createBundlesAndAddBitstreams(c, itemNew, previousItem);
try { try {
identifierService.reserve(c, itemNew); identifierService.reserve(c, itemNew);
@@ -114,4 +126,49 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
} }
/**
* Copy all relationships of the old item to the new item.
* At this point in the lifecycle of the item-version (before archival), only the opposite item receives
* "latest" status. On item archival of the item-version, the "latest" status of the relevant relationships
* will be updated.
* @param context the DSpace context.
* @param newItem the new version of the item.
* @param oldItem the old version of the item.
*/
protected void copyRelationships(
Context context, Item newItem, Item oldItem
) throws SQLException, AuthorizeException {
List<Relationship> oldRelationships = relationshipService.findByItem(context, oldItem, -1, -1, false, true);
for (Relationship oldRelationship : oldRelationships) {
if (oldRelationship.getLeftItem().equals(oldItem)) {
// current item is on left side of this relationship
relationshipService.create(
context,
newItem, // new item
oldRelationship.getRightItem(),
oldRelationship.getRelationshipType(),
oldRelationship.getLeftPlace(),
oldRelationship.getRightPlace(),
oldRelationship.getLeftwardValue(),
oldRelationship.getRightwardValue(),
Relationship.LatestVersionStatus.RIGHT_ONLY // only mark the opposite side as "latest" for now
);
} else if (oldRelationship.getRightItem().equals(oldItem)) {
// current item is on right side of this relationship
relationshipService.create(
context,
oldRelationship.getLeftItem(),
newItem, // new item
oldRelationship.getRelationshipType(),
oldRelationship.getLeftPlace(),
oldRelationship.getRightPlace(),
oldRelationship.getLeftwardValue(),
oldRelationship.getRightwardValue(),
Relationship.LatestVersionStatus.LEFT_ONLY // only mark the opposite side as "latest" for now
);
}
}
}
} }

View File

@@ -22,5 +22,12 @@ public interface ItemVersionProvider {
public void deleteVersionedItem(Context c, Version versionToDelete, VersionHistory history) throws SQLException; public void deleteVersionedItem(Context c, Version versionToDelete, VersionHistory history) throws SQLException;
/**
* Copy all data (minus a few exceptions) from the old item to the new item.
* @param c the DSpace context.
* @param itemNew the new version of the item.
* @param previousItem the old version of the item.
* @return the new version of the item, with data from the old item.
*/
public Item updateItemState(Context c, Item itemNew, Item previousItem); public Item updateItemState(Context c, Item itemNew, Item previousItem);
} }

View File

@@ -7,39 +7,66 @@
*/ */
package org.dspace.versioning; package org.dspace.versioning;
import java.util.HashSet; import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES;
import java.util.Set;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.EntityType;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.discovery.IndexEventConsumer;
import org.dspace.event.Consumer; import org.dspace.event.Consumer;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog;
/** /**
* When a new version of an item is published, unarchive the previous version and
* update {@link Relationship#latestVersionStatus} of the relevant relationships.
*
* @author Fabio Bolognesi (fabio at atmire dot com) * @author Fabio Bolognesi (fabio at atmire dot com)
* @author Mark Diggory (markd at atmire dot com) * @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com) * @author Ben Bosman (ben at atmire dot com)
*/ */
public class VersioningConsumer implements Consumer { public class VersioningConsumer implements Consumer {
private static Set<Item> itemsToProcess; private static final Logger log = LogManager.getLogger(VersioningConsumer.class);
private Set<Item> itemsToProcess;
private VersionHistoryService versionHistoryService; private VersionHistoryService versionHistoryService;
private VersioningService versioningService;
private ItemService itemService; private ItemService itemService;
private EntityTypeService entityTypeService;
private RelationshipTypeService relationshipTypeService;
private RelationshipService relationshipService;
private RelationshipVersioningUtils relationshipVersioningUtils;
@Override @Override
public void initialize() throws Exception { public void initialize() throws Exception {
versionHistoryService = VersionServiceFactory.getInstance().getVersionHistoryService(); versionHistoryService = VersionServiceFactory.getInstance().getVersionHistoryService();
versioningService = VersionServiceFactory.getInstance().getVersionService();
itemService = ContentServiceFactory.getInstance().getItemService(); itemService = ContentServiceFactory.getInstance().getItemService();
entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils();
} }
@Override @Override
@@ -49,35 +76,397 @@ public class VersioningConsumer implements Consumer {
@Override @Override
public void consume(Context ctx, Event event) throws Exception { public void consume(Context ctx, Event event) throws Exception {
if (itemsToProcess == null) { if (itemsToProcess == null) {
itemsToProcess = new HashSet<Item>(); itemsToProcess = new HashSet<>();
} }
int st = event.getSubjectType(); // only items
int et = event.getEventType(); if (event.getSubjectType() != Constants.ITEM) {
return;
}
if (st == Constants.ITEM && et == Event.INSTALL) { // only install events
if (event.getEventType() != Event.INSTALL) {
return;
}
// get the item (should be archived)
Item item = (Item) event.getSubject(ctx); Item item = (Item) event.getSubject(ctx);
if (item != null && item.isArchived()) { if (item == null || !item.isArchived()) {
return;
}
// get version history
VersionHistory history = versionHistoryService.findByItem(ctx, item); VersionHistory history = versionHistoryService.findByItem(ctx, item);
if (history != null) { if (history == null) {
Version latest = versionHistoryService.getLatestVersion(ctx, history); return;
Version previous = versionHistoryService.getPrevious(ctx, history, latest); }
if (previous != null) {
Item previousItem = previous.getItem(); // get latest version
if (previousItem != null) { Version latestVersion = versionHistoryService.getLatestVersion(ctx, history);
previousItem.setArchived(false); if (latestVersion == null) {
itemsToProcess.add(previousItem); return;
}
// get previous version
Version previousVersion = versionHistoryService.getPrevious(ctx, history, latestVersion);
if (previousVersion == null) {
return;
}
// get latest item
Item latestItem = latestVersion.getItem();
if (latestItem == null) {
String msg = String.format(
"Illegal state: Obtained version history of item with uuid %s, handle %s, but the latest item is null",
item.getID(), item.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
// get previous item
Item previousItem = previousVersion.getItem();
if (previousItem == null) {
return;
}
// unarchive previous item
unarchiveItem(ctx, previousItem);
// update relationships
updateRelationships(ctx, latestItem, previousItem);
}
protected void unarchiveItem(Context ctx, Item item) {
item.setArchived(false);
itemsToProcess.add(item);
//Fire a new modify event for our previous item //Fire a new modify event for our previous item
//Due to the need to reindex the item in the search //Due to the need to reindex the item in the search
//and browse index we need to fire a new event //and browse index we need to fire a new event
ctx.addEvent(new Event(Event.MODIFY, ctx.addEvent(new Event(
previousItem.getType(), previousItem.getID(), Event.MODIFY, item.getType(), item.getID(), null, itemService.getIdentifiers(ctx, item)
null, itemService.getIdentifiers(ctx, previousItem))); ));
}
/**
* Update {@link Relationship#latestVersionStatus} of the relationships of both the old version and the new version
* of the item.
*
* This method will first locate all relationships that are eligible for an update,
* then it will try to match each of those relationships on the old version of given item
* with a relationship on the new version.
*
* One of the following scenarios will happen:
* - if a match is found, then the "latest" status on the side of given item is transferred from
* the old relationship to the new relationship. This implies that on the page of the third-party item,
* the old version of given item will NOT be shown anymore and the new version of given item will appear.
* Both versions of the given item still show the third-party item on their pages.
* - if a relationship only exists on the new version of given item, then this method does nothing.
* The status of those relationships should already have been set to "latest" on both sides during relationship
* creation.
* - if a relationship only exists on the old version of given item, then we assume that the relationship is no
* longer relevant to / has been removed from the new version of the item. The "latest" status is removed from
* the side of the given item. This implies that on the page of the third-party item,
* the relationship with given item will no longer be listed. The old version of given item still lists
* the third-party item and the new version doesn't.
* @param ctx the DSpace context.
* @param latestItem the new version of the item.
* @param previousItem the old version of the item.
*/
protected void updateRelationships(Context ctx, Item latestItem, Item previousItem) {
// check that the entity types of both items match
if (!doEntityTypesMatch(latestItem, previousItem)) {
return;
}
// get the entity type (same for both items)
EntityType entityType = getEntityType(ctx, latestItem);
if (entityType == null) {
return;
}
// get all relationship types that are linked to the given entity type
List<RelationshipType> relationshipTypes = getRelationshipTypes(ctx, entityType);
if (CollectionUtils.isEmpty(relationshipTypes)) {
return;
}
for (RelationshipType relationshipType : relationshipTypes) {
List<Relationship> latestItemRelationships = getAllRelationships(ctx, latestItem, relationshipType);
if (latestItemRelationships == null) {
continue;
}
List<Relationship> previousItemRelationships = getAllRelationships(ctx, previousItem, relationshipType);
if (previousItemRelationships == null) {
continue;
}
// NOTE: no need to loop through latestItemRelationships, because if no match can be found
// (meaning a relationship is only present on the new version of the item), then it's
// a newly added relationship and its status should have been set to BOTH during creation.
for (Relationship previousItemRelationship : previousItemRelationships) {
// determine on which side of the relationship the latest and previous item should be
boolean isLeft = previousItem.equals(previousItemRelationship.getLeftItem());
boolean isRight = previousItem.equals(previousItemRelationship.getRightItem());
if (isLeft == isRight) {
Item leftItem = previousItemRelationship.getLeftItem();
Item rightItem = previousItemRelationship.getRightItem();
String msg = String.format(
"Illegal state: could not determine side of item with uuid %s, handle %s in " +
"relationship with id %s, rightward name %s between " +
"left item with uuid %s, handle %s and right item with uuid %s, handle %s",
previousItem.getID(), previousItem.getHandle(), previousItemRelationship.getID(),
previousItemRelationship.getRelationshipType().getRightwardType(),
leftItem.getID(), leftItem.getHandle(), rightItem.getID(), rightItem.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
// get the matching relationship on the latest item
Relationship latestItemRelationship =
getMatchingRelationship(latestItem, isLeft, previousItemRelationship, latestItemRelationships);
// the other side of the relationship should be "latest", otherwise the relationship could not have been
// copied to the new item in the first place (by DefaultItemVersionProvider#copyRelationships)
if (relationshipVersioningUtils.otherSideIsLatest(
isLeft, previousItemRelationship.getLatestVersionStatus()
)) {
// Set the previous version of the item to non-latest. This implies that the previous version
// of the item will not be shown anymore on the page of the third-party item. That makes sense,
// because either the relationship has been deleted from the new version of the item (no match),
// or the matching relationship (linked to new version) will receive "latest" status in
// the next step.
LatestVersionStatusChangelog changelog =
relationshipVersioningUtils.updateLatestVersionStatus(previousItemRelationship, isLeft, false);
reindexRelationship(ctx, changelog, previousItemRelationship);
}
if (latestItemRelationship != null) {
// Set the new version of the item to latest if the relevant relationship exists (match found).
// This implies that the new version of the item will appear on the page of the third-party item.
// The old version of the item will not appear anymore on the page of the third-party item,
// see previous step.
LatestVersionStatusChangelog changelog =
relationshipVersioningUtils.updateLatestVersionStatus(latestItemRelationship, isLeft, true);
reindexRelationship(ctx, changelog, latestItemRelationship);
} }
} }
} }
} }
/**
* If the {@link Relationship#latestVersionStatus} of the relationship has changed,
* an "item modified" event should be fired for both the left and right item of the relationship.
* On one item the relation.* fields will change. On the other item the relation.*.latestForDiscovery will change.
* The event will cause the items to be re-indexed by the {@link IndexEventConsumer}.
* @param ctx the DSpace context.
* @param changelog indicates which side of the relationship has changed.
* @param relationship the relationship.
*/
protected void reindexRelationship(
Context ctx, LatestVersionStatusChangelog changelog, Relationship relationship
) {
if (changelog == NO_CHANGES) {
return;
} }
// on one item, relation.* fields will change
// on the other item, relation.*.latestForDiscovery will change
// reindex left item
Item leftItem = relationship.getLeftItem();
itemsToProcess.add(leftItem);
ctx.addEvent(new Event(
Event.MODIFY, leftItem.getType(), leftItem.getID(), null, itemService.getIdentifiers(ctx, leftItem)
));
// reindex right item
Item rightItem = relationship.getRightItem();
itemsToProcess.add(rightItem);
ctx.addEvent(new Event(
Event.MODIFY, rightItem.getType(), rightItem.getID(), null, itemService.getIdentifiers(ctx, rightItem)
));
}
/**
* Given two items, check if their entity types match.
* If one or both items don't have an entity type, comparing is pointless and this method will return false.
* @param latestItem the item that represents the most recent version.
* @param previousItem the item that represents the second-most recent version.
* @return true if the entity types of both items are non-null and equal, false otherwise.
*/
protected boolean doEntityTypesMatch(Item latestItem, Item previousItem) {
String latestItemEntityType = itemService.getEntityTypeLabel(latestItem);
String previousItemEntityType = itemService.getEntityTypeLabel(previousItem);
// check if both items have an entity type
if (latestItemEntityType == null || previousItemEntityType == null) {
if (previousItemEntityType != null) {
log.warn(
"Inconsistency: Item with uuid {}, handle {} has NO entity type, " +
"but the previous version of that item with uuid {}, handle {} has entity type {}",
latestItem.getID(), latestItem.getHandle(),
previousItem.getID(), previousItem.getHandle(), previousItemEntityType
);
}
// one or both items do not have an entity type, so comparing is pointless
return false;
}
// check if the entity types are equal
if (!StringUtils.equals(latestItemEntityType, previousItemEntityType)) {
log.warn(
"Inconsistency: Item with uuid {}, handle {} has entity type {}, " +
"but the previous version of that item with uuid {}, handle {} has entity type {}",
latestItem.getID(), latestItem.getHandle(), latestItemEntityType,
previousItem.getID(), previousItem.getHandle(), previousItemEntityType
);
return false;
}
// success - the entity types of both items are non-null and equal
log.info(
"Item with uuid {}, handle {} and the previous version of that item with uuid {}, handle {} " +
"have the same entity type: {}",
latestItem.getID(), latestItem.getHandle(), previousItem.getID(), previousItem.getHandle(),
latestItemEntityType
);
return true;
}
/**
* Get the entity type (stored in metadata field dspace.entity.type) of any item.
* @param item the item.
* @return the entity type.
*/
protected EntityType getEntityType(Context ctx, Item item) {
try {
return itemService.getEntityType(ctx, item);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain entity type with label {} of item with uuid {}, handle {}",
itemService.getEntityTypeLabel(item), item.getID(), item.getHandle(), e
);
return null;
}
}
/**
* Get all relationship types that have the given entity type on their left and/or right side.
* @param ctx the DSpace context.
* @param entityType the entity type for which all relationship types should be found.
* @return a list of relationship types (possibly empty), or null in case of error.
*/
protected List<RelationshipType> getRelationshipTypes(Context ctx, EntityType entityType) {
try {
return relationshipTypeService.findByEntityType(ctx, entityType);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain relationship types via entity type with id {}, label {}",
entityType.getID(), entityType.getLabel(), e
);
return null;
}
}
/**
* Get all relationships of the given type linked to the given item.
* @param ctx the DSpace context.
* @param item the item.
* @param relationshipType the relationship type.
* @return a list of relationships (possibly empty), or null in case of error.
*/
protected List<Relationship> getAllRelationships(Context ctx, Item item, RelationshipType relationshipType) {
try {
return relationshipService.findByItemAndRelationshipType(ctx, item, relationshipType, -1, -1, false);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain relationships of type with id {}, rightward name {} " +
"for item with uuid {}, handle {}",
relationshipType.getID(), relationshipType.getRightwardType(), item.getID(), item.getHandle(), e
);
return null;
}
}
/**
* From a list of relationships, find the relationship with the correct relationship type and items.
* If isLeft is true, the provided item should be on the left side of the relationship.
* If isLeft is false, the provided item should be on the right side of the relationship.
* In both cases, the other item is taken from the given relationship.
* @param latestItem the item that should either be on the left or right side of the returned relationship (if any).
* @param isLeft decide on which side of the relationship the provided item should be.
* @param previousItemRelationship the relationship from which the type and the other item are read.
* @param relationships the list of relationships that we'll search through.
* @return the relationship that satisfies the requirements (can only be one or zero).
*/
protected Relationship getMatchingRelationship(
Item latestItem, boolean isLeft, Relationship previousItemRelationship, List<Relationship> relationships
) {
Item leftItem = previousItemRelationship.getLeftItem();
RelationshipType relationshipType = previousItemRelationship.getRelationshipType();
Item rightItem = previousItemRelationship.getRightItem();
if (isLeft) {
return getMatchingRelationship(latestItem, relationshipType, rightItem, relationships);
} else {
return getMatchingRelationship(leftItem, relationshipType, latestItem, relationships);
}
}
/**
* Find the relationship with the given left item, relation type and right item, from a list of relationships.
* @param expectedLeftItem the relationship that we're looking for has this item on the left side.
* @param expectedRelationshipType the relationship that we're looking for has this relationship type.
* @param expectedRightItem the relationship that we're looking for has this item on the right side.
* @param relationships the list of relationships that we'll search through.
* @return the relationship that satisfies the requirements (can only be one or zero).
*/
protected Relationship getMatchingRelationship(
Item expectedLeftItem, RelationshipType expectedRelationshipType, Item expectedRightItem,
List<Relationship> relationships
) {
Integer expectedRelationshipTypeId = expectedRelationshipType.getID();
List<Relationship> matchingRelationships = relationships.stream()
.filter(relationship -> {
int relationshipTypeId = relationship.getRelationshipType().getID();
boolean leftItemMatches = expectedLeftItem.equals(relationship.getLeftItem());
boolean relationshipTypeMatches = expectedRelationshipTypeId == relationshipTypeId;
boolean rightItemMatches = expectedRightItem.equals(relationship.getRightItem());
return leftItemMatches && relationshipTypeMatches && rightItemMatches;
})
.distinct()
.collect(Collectors.toUnmodifiableList());
if (matchingRelationships.isEmpty()) {
return null;
}
// NOTE: this situation should never occur because the relationship table has a unique constraint
// over the "left_id", "type_id" and "right_id" columns
if (matchingRelationships.size() > 1) {
String msg = String.format(
"Illegal state: expected 0 or 1 relationship, but found %s relationships (ids: %s) " +
"of type with id %s, rightward name %s " +
"between left item with uuid %s, handle %s and right item with uuid %s, handle %s",
matchingRelationships.size(),
matchingRelationships.stream().map(Relationship::getID).collect(Collectors.toUnmodifiableList()),
expectedRelationshipTypeId, expectedRelationshipType.getRightwardType(),
expectedLeftItem.getID(), expectedLeftItem.getHandle(),
expectedRightItem.getID(), expectedRightItem.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
return matchingRelationships.get(0);
} }
@Override @Override

View File

@@ -10,6 +10,7 @@ package org.dspace.versioning.factory;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
/** /**
* Abstract factory to get services for the versioning package, use VersionServiceFactory.getInstance() to retrieve * Abstract factory to get services for the versioning package, use VersionServiceFactory.getInstance() to retrieve
@@ -23,6 +24,8 @@ public abstract class VersionServiceFactory {
public abstract VersioningService getVersionService(); public abstract VersioningService getVersionService();
public abstract RelationshipVersioningUtils getRelationshipVersioningUtils();
public static VersionServiceFactory getInstance() { public static VersionServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager() return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("versionServiceFactory", VersionServiceFactory.class); .getServiceByName("versionServiceFactory", VersionServiceFactory.class);

View File

@@ -9,6 +9,7 @@ package org.dspace.versioning.factory;
import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
/** /**
@@ -25,6 +26,9 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory {
@Autowired(required = true) @Autowired(required = true)
protected VersioningService versionService; protected VersioningService versionService;
@Autowired(required = true)
protected RelationshipVersioningUtils relationshipVersioningUtils;
@Override @Override
public VersionHistoryService getVersionHistoryService() { public VersionHistoryService getVersionHistoryService() {
return versionHistoryService; return versionHistoryService;
@@ -34,4 +38,10 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory {
public VersioningService getVersionService() { public VersioningService getVersionService() {
return versionService; return versionService;
} }
@Override
public RelationshipVersioningUtils getRelationshipVersioningUtils() {
return relationshipVersioningUtils;
}
} }

View File

@@ -0,0 +1,114 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.versioning.utils;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.LEFT_SIDE_CHANGED;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.RIGHT_SIDE_CHANGED;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
/**
* Class with utility methods to manipulate relationships that are linked to versioned items.
* Specifically focussed on the "latest version status" of relationships,
* which controls which related items are relevant (visible) to any given item.
*/
public class RelationshipVersioningUtils {
private static final Logger log = LogManager.getLogger(RelationshipVersioningUtils.class);
/**
* Given a latest version status, check if the other side is "latest".
* If we look from the left, this implies BOTH and RIGHT_ONLY return true.
* If we look from the right, this implies BOTH and LEFT_ONLY return true.
* @param isLeft whether we should look from the left or right side.
* @param latestVersionStatus the latest version status.
* @return true if the other side has "latest" status, false otherwise.
*/
public boolean otherSideIsLatest(boolean isLeft, LatestVersionStatus latestVersionStatus) {
if (latestVersionStatus == LatestVersionStatus.BOTH) {
return true;
}
return latestVersionStatus == (isLeft ? LatestVersionStatus.RIGHT_ONLY : LatestVersionStatus.LEFT_ONLY);
}
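// Illustrative truth table for the method above (not part of the original patch):
//   otherSideIsLatest(true,  BOTH)       -> true     otherSideIsLatest(false, BOTH)       -> true
//   otherSideIsLatest(true,  RIGHT_ONLY) -> true     otherSideIsLatest(false, RIGHT_ONLY) -> false
//   otherSideIsLatest(true,  LEFT_ONLY)  -> false    otherSideIsLatest(false, LEFT_ONLY)  -> true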
public enum LatestVersionStatusChangelog {
NO_CHANGES,
LEFT_SIDE_CHANGED,
RIGHT_SIDE_CHANGED
}
/**
* Update {@link Relationship#latestVersionStatus} of the given relationship.
* If isLatest = true, this method will never throw IllegalStateException.
* If isLatest = false and the selected side is currently the only side with "latest" status,
* IllegalStateException will be thrown, because neither side would remain "latest".
* @param relationship the relationship.
* @param updateLeftSide whether the status of the left item or the right item should be updated.
* @param isLatest to what the status should be set.
* @throws IllegalStateException if the operation would result in both the left side and the right side
* being set to non-latest.
*/
public LatestVersionStatusChangelog updateLatestVersionStatus(
Relationship relationship, boolean updateLeftSide, boolean isLatest
) throws IllegalStateException {
LatestVersionStatus lvs = relationship.getLatestVersionStatus();
boolean leftSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.LEFT_ONLY;
boolean rightSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.RIGHT_ONLY;
if (updateLeftSide) {
if (leftSideIsLatest == isLatest) {
return NO_CHANGES; // no change needed
}
leftSideIsLatest = isLatest;
} else {
if (rightSideIsLatest == isLatest) {
return NO_CHANGES; // no change needed
}
rightSideIsLatest = isLatest;
}
LatestVersionStatus newVersionStatus;
if (leftSideIsLatest && rightSideIsLatest) {
newVersionStatus = LatestVersionStatus.BOTH;
} else if (leftSideIsLatest) {
newVersionStatus = LatestVersionStatus.LEFT_ONLY;
} else if (rightSideIsLatest) {
newVersionStatus = LatestVersionStatus.RIGHT_ONLY;
} else {
String msg = String.format(
"Illegal state: cannot set %s item to latest = false, because relationship with id %s, " +
"rightward name %s between left item with uuid %s, handle %s and right item with uuid %s, handle %s " +
"has latest version status set to %s",
updateLeftSide ? "left" : "right", relationship.getID(),
relationship.getRelationshipType().getRightwardType(),
relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(),
relationship.getRightItem().getID(), relationship.getRightItem().getHandle(), lvs
);
log.error(msg);
throw new IllegalStateException(msg);
}
log.info(
"set latest version status from {} to {} for relationship with id {}, rightward name {} " +
"between left item with uuid {}, handle {} and right item with uuid {}, handle {}",
lvs, newVersionStatus, relationship.getID(), relationship.getRelationshipType().getRightwardType(),
relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(),
relationship.getRightItem().getID(), relationship.getRightItem().getHandle()
);
relationship.setLatestVersionStatus(newVersionStatus);
return updateLeftSide ? LEFT_SIDE_CHANGED : RIGHT_SIDE_CHANGED;
}
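// Illustrative transitions for the method above (not part of the original patch):
//   BOTH,       updateLeftSide=true,  isLatest=false -> RIGHT_ONLY (LEFT_SIDE_CHANGED)
//   RIGHT_ONLY, updateLeftSide=true,  isLatest=true  -> BOTH       (LEFT_SIDE_CHANGED)
//   RIGHT_ONLY, updateLeftSide=true,  isLatest=false -> NO_CHANGES (left side is already non-latest)
//   RIGHT_ONLY, updateLeftSide=false, isLatest=false -> IllegalStateException (neither side would be latest)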
}

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------
CREATE TABLE Process2Group
(
process_id INTEGER REFERENCES Process(process_id),
group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------
CREATE TABLE Process2Group
(
process_id INTEGER REFERENCES Process(process_id),
group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------
CREATE TABLE Process2Group
(
process_id INTEGER REFERENCES Process(process_id),
group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -43,11 +43,11 @@
class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping"> class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping">
</bean> </bean>
<bean id="pubmedImportService" <bean id="pubmedImportService"
class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl"> class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl">
<property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/> <property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/> <property name="urlFetch" value="${pubmed.url.fetch}"/>
<property name="urlSearch" value="${pubmed.url.search}"/>
<property name="generateQueryForItem" ref="pubmedService"></property> <property name="generateQueryForItem" ref="pubmedService"></property>
<property name="supportedExtensions"> <property name="supportedExtensions">
<list> <list>
@@ -56,7 +56,6 @@
</property> </property>
</bean> </bean>
<bean id="pubmedMetadataFieldMapping" <bean id="pubmedMetadataFieldMapping"
class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping"> class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping">
</bean> </bean>
@@ -120,6 +119,7 @@
<property name="url" value="${crossref.url}"/> <property name="url" value="${crossref.url}"/>
</bean> </bean>
<bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/> <bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/>
<bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton"> <bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/> <property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/>
<property name="consumerKey" value="${epo.consumerKey}" /> <property name="consumerKey" value="${epo.consumerKey}" />
@@ -133,6 +133,15 @@
</bean> </bean>
<bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/> <bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/>
<bean id="ScopusImportService" class="org.dspace.importer.external.scopus.service.ScopusImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="scopusMetadataFieldMapping"/>
<property name="url" value="${scopus.url}"/>
<property name="apiKey" value="${scopus.apiKey}"/>
<property name="instKey" value="${scopus.instToken}"/>
<property name="viewMode" value="${scopus.search-api.viewMode}"/>
</bean>
<bean id="scopusMetadataFieldMapping" class="org.dspace.importer.external.scopus.service.ScopusFieldMapping"/>
<bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton"> <bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton">
<!-- Set to empty to use the default set of fields --> <!-- Set to empty to use the default set of fields -->
<constructor-arg type="java.lang.String" value=""/> <constructor-arg type="java.lang.String" value=""/>
@@ -148,6 +157,27 @@
</bean> </bean>
<bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/> <bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/>
<bean id="WosImportService" class="org.dspace.importer.external.wos.service.WOSImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="wosMetadataFieldMapping"/>
<property name="apiKey" value="${wos.apiKey}"/>
<property name="url" value="${wos.url}"/>
<property name="urlSearch" value="${wos.url.search}"/>
</bean>
<bean id="wosMetadataFieldMapping" class="org.dspace.importer.external.wos.service.WOSFieldMapping"/>
<bean id="PubmedEuropeImportService" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="PubmedEuropeMetadataFieldMapping"/>
<property name="url" value="${pubmedeurope.url}"/>
</bean>
<bean id="PubmedEuropeMetadataFieldMapping" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeFieldMapping"/>
<bean id="CiniiImportService" class="org.dspace.importer.external.cinii.CiniiImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="CiniiMetadataFieldMapping"/>
<property name="url" value="${cinii.url}"/>
<property name="urlSearch" value="${cinii.url.search}"/>
</bean>
<bean id="CiniiMetadataFieldMapping" class="org.dspace.importer.external.cinii.CiniiFieldMapping"/>
<bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton"> <bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton">
<property name="apiKey" value="${ads.key}" /> <property name="apiKey" value="${ads.key}" />
<property name="url" value="${ads.url}" /> <property name="url" value="${ads.url}" />

View File

@@ -145,6 +145,8 @@ useProxies = true
proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.ipranges = 7.7.7.7
proxies.trusted.include_ui_ip = true proxies.trusted.include_ui_ip = true
csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports
# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN # For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN
management.health.solrOai.enabled = false management.health.solrOai.enabled = false

View File

@@ -59,22 +59,26 @@
    </bean>
    <bean id="pubmedLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
-       <property name="metadataSource" ref="mockPubmedImportService"/>
+       <property name="metadataSource" ref="pubmedImportService"/>
        <property name="sourceIdentifier" value="pubmed"/>
        <property name="recordIdMetadata" value="dc.identifier.other"/>
+       <property name="supportedEntityTypes">
+           <list>
+               <value>Publication</value>
+               <value>none</value>
+           </list>
+       </property>
    </bean>
-   <bean id="mockPubmedImportService"
-         class="org.dspace.external.provider.impl.MockPubmedImportMetadataSourceServiceImpl">
-       <property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
-       <property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/>
-       <property name="supportedExtensions">
+   <bean id="scopusLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
+       <property name="metadataSource" ref="ScopusImportService"/>
+       <property name="sourceIdentifier" value="scopus"/>
+       <property name="recordIdMetadata" value="dc.identifier.scopus"/>
+       <property name="supportedEntityTypes">
            <list>
-               <value>xml</value>
+               <value>Publication</value>
            </list>
        </property>
    </bean>
</beans>

View File

@@ -22,6 +22,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/> <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/>
</bean> </bean>
<bean id="metadata-export-search" class="org.dspace.app.bulkedit.MetadataExportSearchCliScriptConfiguration">
<property name="description" value="export metadata from a discovery search" />
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportSearchCli" />
</bean>
<bean id="curate" <bean id="curate"
class="org.dspace.curate.CurationCliScriptConfiguration"> class="org.dspace.curate.CurationCliScriptConfiguration">
<property name="description" <property name="description"
@@ -45,6 +50,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/> <property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/>
</bean> </bean>
<bean id="solr-database-resync" class="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCliScriptConfiguration">
<property name="description" value="Update the database status of Items in solr"/>
<property name="dspaceRunnableClass" value="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli"/>
</bean>
<bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype"> <bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" /> <property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/> <property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

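The bean ids registered above ("metadata-export-search", "solr-database-resync") double as the script names handed to the launcher. A minimal sketch of driving one of them the same way the integration tests further down do, via ScriptLauncher.handleScript; kernelImpl and the handler come from the AbstractIntegrationTestWithDatabase harness:

@Test
public void solrDatabaseResyncLaunchSketch() throws Exception {
    // "solr-database-resync" matches the bean id registered above.
    String[] args = new String[] {"solr-database-resync"};
    TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler();
    int status = ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl);
    assertEquals(0, status); // failures inside the script surface via handler.getException()
}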
View File

@@ -0,0 +1,253 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.google.common.io.Files;
import com.opencsv.CSVReader;
import com.opencsv.exceptions.CsvException;
import org.apache.log4j.Logger;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase {
private String subject1 = "subject1";
private String subject2 = "subject2";
private int numberItemsSubject1 = 30;
private int numberItemsSubject2 = 2;
private Item[] itemsSubject1 = new Item[numberItemsSubject1];
private Item[] itemsSubject2 = new Item[numberItemsSubject2];
private String filename;
private Collection collection;
private Logger logger = Logger.getLogger(MetadataExportSearchIT.class);
private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
private SearchService searchService = SearchUtils.getSearchService();
@Override
@Before
public void setUp() throws Exception {
super.setUp();
// dummy search so that the SearchService gets called in a test context first
DiscoverQuery query = new DiscoverQuery();
query.setMaxResults(0);
searchService.search(context, query);
context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context).build();
collection = CollectionBuilder.createCollection(context, community).build();
filename = configurationService.getProperty("dspace.dir")
+ testProps.get("test.exportcsv").toString();
for (int i = 0; i < numberItemsSubject1; i++) {
itemsSubject1[i] = ItemBuilder.createItem(context, collection)
.withTitle(String.format("%s item %d", subject1, i))
.withSubject(subject1)
.withIssueDate("2020-09-" + i)
.build();
}
for (int i = 0; i < numberItemsSubject2; i++) {
itemsSubject2[i] = ItemBuilder.createItem(context, collection)
.withTitle(String.format("%s item %d", subject2, i))
.withSubject(subject2)
.withIssueDate("2021-09-" + i)
.build();
}
context.restoreAuthSystemState();
}
private void checkItemsPresentInFile(String filename, Item[] items) throws IOException, CsvException {
File file = new File(filename);
Reader reader = Files.newReader(file, Charset.defaultCharset());
CSVReader csvReader = new CSVReader(reader);
List<String[]> lines = csvReader.readAll();
// length + 1 accounts for the extra header row
assertEquals(items.length + 1, lines.size());
List<String> ids = new ArrayList<>();
// skip the first row, as it only contains the headers
logger.debug("checking content of lines");
for (int i = 1; i < lines.size(); i++) {
logger.debug(String.join(", ", lines.get(i)));
ids.add(lines.get(i)[0]);
}
for (Item item : items) {
assertTrue(ids.contains(item.getID().toString()));
}
}
@Test
public void metadataExportSearchQueryTest() throws Exception {
int result = runDSpaceScript("metadata-export-search", "-q", "subject:" + subject1, "-n", filename);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsSubject1);
result = runDSpaceScript("metadata-export-search", "-q", "subject: " + subject2, "-n", filename);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsSubject2);
}
@Test
public void exportMetadataSearchSpecificContainerTest() throws Exception {
context.turnOffAuthorisationSystem();
Community community2 = CommunityBuilder.createCommunity(context).build();
Collection collection2 = CollectionBuilder.createCollection(context, community2).build();
int numberItemsDifferentCollection = 15;
Item[] itemsDifferentCollection = new Item[numberItemsDifferentCollection];
for (int i = 0; i < numberItemsDifferentCollection; i++) {
itemsDifferentCollection[i] = ItemBuilder.createItem(context, collection2)
.withTitle("item different collection " + i)
.withSubject(subject1)
.build();
}
//creating some items with a different subject to make sure the query still works
for (int i = 0; i < 5; i++) {
ItemBuilder.createItem(context, collection2)
.withTitle("item different collection, different subject " + i)
.withSubject(subject2)
.build();
}
context.restoreAuthSystemState();
int result = runDSpaceScript(
"metadata-export-search", "-q", "subject: " + subject1, "-s", collection2.getID().toString(), "-n", filename
);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsDifferentCollection);
}
@Test
public void exportMetadataSearchFilter() throws Exception {
int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=" + subject1, "-n", filename);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsSubject1);
}
@Test
public void exportMetadataSearchFilterDate() throws Exception {
int result = runDSpaceScript(
"metadata-export-search", "-f", "dateIssued,equals=[2000 TO 2020]", "-n", filename
);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsSubject1);
}
@Test
public void exportMetadataSearchMultipleFilters() throws Exception {
int result = runDSpaceScript(
"metadata-export-search", "-f", "subject,equals=" + subject1, "-f",
"title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename
);
assertEquals(0, result);
Item[] expectedResult = Arrays.copyOfRange(itemsSubject1, 0, 1);
checkItemsPresentInFile(filename, expectedResult);
}
@Test
public void exportMetadataSearchEqualsFilterTest()
throws Exception {
context.turnOffAuthorisationSystem();
Item wellBeingItem = ItemBuilder.createItem(context, collection)
.withTitle("test item well-being")
.withSubject("well-being")
.build();
ItemBuilder.createItem(context, collection)
.withTitle("test item financial well-being")
.withSubject("financial well-being")
.build();
context.restoreAuthSystemState();
int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=well-being", "-n", filename);
assertEquals(0, result);
Item[] expectedResult = new Item[] {wellBeingItem};
checkItemsPresentInFile(filename, expectedResult);
}
@Test
public void exportMetadataSearchInvalidDiscoveryQueryTest() throws Exception {
int result = runDSpaceScript("metadata-export-search", "-q", "blabla", "-n", filename);
assertEquals(0, result);
Item[] items = {};
checkItemsPresentInFile(filename, items);
}
@Test
public void exportMetadataSearchNoResultsTest() throws Exception {
int result = runDSpaceScript(
"metadata-export-search", "-f", "subject,equals=notExistingSubject", "-n", filename
);
assertEquals(0, result);
Item[] items = {};
checkItemsPresentInFile(filename, items);
}
@Test
public void exportMetadataSearchNonExistingFacetsTest() throws Exception {
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
String[] args = new String[] {"metadata-export-search", "-f", "nonExisting,equals=" + subject1, "-f",
"title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename};
int result = ScriptLauncher.handleScript(
args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl
);
assertEquals(0, result); // exception should be handled, so the script should finish with 0
Exception exception = testDSpaceRunnableHandler.getException();
assertNotNull(exception);
assertEquals("nonExisting is not a valid search filter", exception.getMessage());
}
}

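Taken together, the tests above cover the script's main options: -q for a discovery query, -s for a scope (a collection uuid in these tests), -f for repeatable name,operator=value discovery filters, and -n for the output CSV. A condensed sketch combining them in one call, reusing the test fields above and assuming the options may be combined freely:

@Test
public void combinedOptionsSketch() throws Exception {
    int result = runDSpaceScript(
        "metadata-export-search",
        "-q", "subject:" + subject1,               // discovery query
        "-s", collection.getID().toString(),       // scope: container uuid
        "-f", "dateIssued,equals=[2000 TO 2020]",  // repeatable discovery filter
        "-n", filename);                           // output CSV path
    assertEquals(0, result);
}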
View File

@@ -19,6 +19,7 @@ import java.util.List;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.collections4.IteratorUtils; import org.apache.commons.collections4.IteratorUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher; import org.dspace.app.launcher.ScriptLauncher;
@@ -93,10 +94,10 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase {
} }
@Test @Test
public void metadataImportIntoCollectionWithEntityTypeTest() throws Exception { public void metadataImportIntoCollectionWithEntityTypeWithTemplateEnabledTest() throws Exception {
String[] csv = {"id,collection,dc.title,dc.contributor.author", String[] csv = {"id,collection,dc.title,dc.contributor.author",
"+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""}; "+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""};
performImportScript(csv); performImportScript(csv, true);
Item importedItem = findItemByName("Test Import 1"); Item importedItem = findItemByName("Test Import 1");
assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY) assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY)
.get(0).getValue(), "Donald, SmithImported")); .get(0).getValue(), "Donald, SmithImported"));
@@ -110,6 +111,24 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase {
context.restoreAuthSystemState(); context.restoreAuthSystemState();
} }
@Test
public void metadataImportIntoCollectionWithEntityTypeWithTemplateDisabledTest() throws Exception {
String[] csv = {"id,collection,dc.title,dc.contributor.author",
"+," + publicationCollection.getHandle() + ",\"Test Import 1\"," + "\"Donald, SmithImported\""};
performImportScript(csv, false);
Item importedItem = findItemByName("Test Import 1");
assertTrue(StringUtils.equals(itemService.getMetadata(importedItem, "dc", "contributor", "author", Item.ANY)
.get(0).getValue(), "Donald, SmithImported"));
assertEquals(0, itemService.getMetadata(importedItem, "dspace", "entity", "type", Item.ANY)
.size());
eperson = ePersonService.findByEmail(context, eperson.getEmail());
assertEquals(importedItem.getSubmitter(), eperson);
context.turnOffAuthorisationSystem();
itemService.delete(context, itemService.find(context, importedItem.getID()));
context.restoreAuthSystemState();
}
@Test(expected = ParseException.class) @Test(expected = ParseException.class)
public void metadataImportWithoutEPersonParameterTest() public void metadataImportWithoutEPersonParameterTest()
throws IllegalAccessException, InstantiationException, ParseException { throws IllegalAccessException, InstantiationException, ParseException {
@@ -227,12 +246,16 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase {
return importedItem; return importedItem;
} }
public void performImportScript(String[] csv) throws Exception {
performImportScript(csv, false);
}
/** /**
* Import mocked CSVs to test item creation behavior, deleting temporary file afterward. * Import mocked CSVs to test item creation behavior, deleting temporary file afterward.
* @param csv content for test file. * @param csv content for test file.
* @throws java.lang.Exception passed through. * @throws java.lang.Exception passed through.
*/ */
public void performImportScript(String[] csv) throws Exception { public void performImportScript(String[] csv, boolean useTemplate) throws Exception {
File csvFile = File.createTempFile("dspace-test-import", "csv"); File csvFile = File.createTempFile("dspace-test-import", "csv");
BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile), "UTF-8")); BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile), "UTF-8"));
for (String csvLine : csv) { for (String csvLine : csv) {
@@ -243,6 +266,9 @@ public class MetadataImportIT extends AbstractIntegrationTestWithDatabase {
String fileLocation = csvFile.getAbsolutePath(); String fileLocation = csvFile.getAbsolutePath();
try { try {
String[] args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s"}; String[] args = new String[] {"metadata-import", "-f", fileLocation, "-e", eperson.getEmail(), "-s"};
if (useTemplate) {
args = ArrayUtils.add(args, "-t");
}
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler(); TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher ScriptLauncher
.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl); .handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);

View File

@@ -0,0 +1,154 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.discovery.MockSolrSearchCore;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before;
import org.junit.Test;
public class SolrDatabaseResyncIT extends AbstractIntegrationTestWithDatabase {
private final ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
private final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
private MockSolrSearchCore searchService;
private Collection col;
private Item item1;
private Item item2;
@Before
public void setUp() throws Exception {
super.setUp();
configurationService.setProperty("solr-database-resync.time-until-reindex", 1);
ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class);
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build();
col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build();
item1 = ItemBuilder.createItem(context, col)
.withTitle("Public item 1")
.withIssueDate("2010-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
item2 = ItemBuilder.createItem(context, col)
.withTitle("Public item 2")
.withIssueDate("2011-08-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
context.setDispatcher("noindex");
}
@Test
public void solrPreDBStatusExistingItemTest() throws Exception {
// Items were created, they should contain a predb status in solr
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
performSolrDatabaseResyncScript();
// Database status script was performed, their predb status should be removed
assertHasNoPreDBStatus(item1);
assertHasNoPreDBStatus(item2);
context.restoreAuthSystemState();
}
@Test
public void solrPreDBStatusRemovedItemTest() throws Exception {
// Items were created, they should contain a predb status in solr
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
collectionService.delete(context, col);
// Items were deleted, they should still contain a predb status in solr for now
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
performSolrDatabaseResyncScript();
// Database status script was performed, their solr document should have been removed
assertNoSolrDocument(item1);
assertNoSolrDocument(item2);
context.restoreAuthSystemState();
}
public void assertHasNoPreDBStatus(Item item) throws Exception {
assertNotEquals(STATUS_FIELD_PREDB, getStatus(item));
}
public void assertHasPreDBStatus(Item item) throws Exception {
assertEquals(STATUS_FIELD_PREDB, getStatus(item));
}
public void assertNoSolrDocument(Item item) throws Exception {
SolrDocumentList solrDocumentList = getSolrDocumentList(item);
assertEquals(0, solrDocumentList.size());
}
public String getStatus(Item item) throws Exception {
SolrDocumentList solrDocumentList = getSolrDocumentList(item);
List fieldValues = ((List) solrDocumentList.get(0).getFieldValues(STATUS_FIELD));
if (CollectionUtils.isNotEmpty(fieldValues)) {
return (String) fieldValues.get(0);
} else {
return null;
}
}
public SolrDocumentList getSolrDocumentList(Item item) throws Exception {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery("search.resourceid:" + item.getID());
QueryResponse queryResponse = searchService.getSolr().query(solrQuery);
return queryResponse.getResults();
}
public void performSolrDatabaseResyncScript() throws Exception {
String[] args = new String[] {"solr-database-resync"};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher
.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
}
}

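The helpers above check documents one item at a time; an equivalent way to spot anything still flagged as "predb" after the resync is a direct status query. A hypothetical SolrJ fragment reusing the test's fields and the STATUS_FIELD / STATUS_FIELD_PREDB constants imported above:

// Count documents whose database status is still the "predb" marker.
SolrQuery query = new SolrQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
query.setRows(0); // only the count is needed
QueryResponse response = searchService.getSolr().query(query);
long stillPending = response.getResults().getNumFound();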
View File

@@ -57,7 +57,7 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
this.context = context; this.context = context;
try { try {
workspaceItem = workspaceItemService.create(context, col, false); workspaceItem = workspaceItemService.create(context, col, true);
item = workspaceItem.getItem(); item = workspaceItem.getItem();
} catch (Exception e) { } catch (Exception e) {
return handleException(e); return handleException(e);

View File

@@ -12,11 +12,13 @@ import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.List; import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus; import org.dspace.content.ProcessStatus;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.service.ProcessService; import org.dspace.scripts.service.ProcessService;
@@ -33,14 +35,22 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
List<DSpaceCommandLineParameter> parameters) List<DSpaceCommandLineParameter> parameters)
throws SQLException { throws SQLException {
ProcessBuilder processBuilder = new ProcessBuilder(context); ProcessBuilder processBuilder = new ProcessBuilder(context);
return processBuilder.create(context, ePerson, scriptName, parameters); return processBuilder.create(context, ePerson, scriptName, parameters, null);
}
public static ProcessBuilder createProcess(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters,
Set<Group> specialGroups)
throws SQLException {
ProcessBuilder processBuilder = new ProcessBuilder(context);
return processBuilder.create(context, ePerson, scriptName, parameters, specialGroups);
} }
private ProcessBuilder create(Context context, EPerson ePerson, String scriptName, private ProcessBuilder create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) List<DSpaceCommandLineParameter> parameters, final Set<Group> specialGroups)
throws SQLException { throws SQLException {
this.context = context; this.context = context;
this.process = processService.create(context, ePerson, scriptName, parameters); this.process = processService.create(context, ePerson, scriptName, parameters, specialGroups);
this.process.setProcessStatus(ProcessStatus.SCHEDULED); this.process.setProcessStatus(ProcessStatus.SCHEDULED);
return this; return this;
} }

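A hypothetical use of the new overload, attaching the special groups that should be remembered for a process; the group, parameters and script name are placeholders, and the original single-list variant simply delegates with a null group set:

Set<Group> specialGroups = new HashSet<>(Collections.singletonList(adminGroup));
Process process = ProcessBuilder
        .createProcess(context, eperson, "mock-script", parameters, specialGroups)
        .build();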
View File

@@ -107,18 +107,26 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
} }
public static RelationshipBuilder createRelationshipBuilder(Context context, Item leftItem, Item rightItem, public static RelationshipBuilder createRelationshipBuilder(Context context, Item leftItem, Item rightItem,
RelationshipType relationshipType) { RelationshipType relationshipType, int leftPlace, int rightPlace) {
RelationshipBuilder relationshipBuilder = new RelationshipBuilder(context); RelationshipBuilder relationshipBuilder = new RelationshipBuilder(context);
return relationshipBuilder.create(context, leftItem, rightItem, relationshipType); return relationshipBuilder.create(context, leftItem, rightItem, relationshipType, leftPlace, rightPlace);
}
public static RelationshipBuilder createRelationshipBuilder(Context context, Item leftItem, Item rightItem,
RelationshipType relationshipType) {
return createRelationshipBuilder(context, leftItem, rightItem, relationshipType, -1, -1);
} }
private RelationshipBuilder create(Context context, Item leftItem, Item rightItem, private RelationshipBuilder create(Context context, Item leftItem, Item rightItem,
RelationshipType relationshipType) { RelationshipType relationshipType, int leftPlace, int rightPlace) {
this.context = context; this.context = context;
try { try {
relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0); //place -1 will add it to the end
relationship = relationshipService.create(context, leftItem, rightItem, relationshipType,
leftPlace, rightPlace);
} catch (SQLException | AuthorizeException e) { } catch (SQLException | AuthorizeException e) {
log.warn("Failed to create relationship", e); log.warn("Failed to create relationship", e);
} }
@@ -140,4 +148,10 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
relationship.setLeftPlace(leftPlace); relationship.setLeftPlace(leftPlace);
return this; return this;
} }
public RelationshipBuilder withLatestVersionStatus(Relationship.LatestVersionStatus latestVersionStatus) {
relationship.setLatestVersionStatus(latestVersionStatus);
return this;
}
} }

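A hypothetical use of the extended builder API: explicit left/right places (or -1 to append at the end, as noted in the comment above) plus, where a test needs it, an explicit latest version status; publication, author and isAuthorOfPublication stand in for items and a relationship type created elsewhere in the test:

Relationship relationship = RelationshipBuilder
        .createRelationshipBuilder(context, publication, author, isAuthorOfPublication, 0, -1)
        .withLatestVersionStatus(Relationship.LatestVersionStatus.BOTH)
        .build();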
View File

@@ -8,6 +8,7 @@
package org.dspace.content; package org.dspace.content;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import java.util.List; import java.util.List;
@@ -71,19 +72,28 @@ public class LeftTiltedRelationshipMetadataServiceIT extends RelationshipMetadat
//request the virtual metadata of the publication only //request the virtual metadata of the publication only
List<RelationshipMetadataValue> leftList = relationshipMetadataService List<RelationshipMetadataValue> leftList = relationshipMetadataService
.getRelationshipMetadata(leftItem, true); .getRelationshipMetadata(leftItem, true);
assertThat(leftList.size(), equalTo(2)); assertThat(leftList.size(), equalTo(3));
assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName"));
assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(),
assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication"));
assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName"));
assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc"));
equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor"));
assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author"));
assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication"));
assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue());
assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID()));
// rightItem is the author // rightItem is the author
List<MetadataValue> rightRelationshipMetadataList = itemService List<MetadataValue> rightRelationshipMetadataList = itemService
.getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY);

View File

@@ -8,6 +8,7 @@
package org.dspace.content; package org.dspace.content;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
@@ -186,19 +187,28 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa
//request the virtual metadata of the publication only //request the virtual metadata of the publication only
List<RelationshipMetadataValue> leftList = relationshipMetadataService List<RelationshipMetadataValue> leftList = relationshipMetadataService
.getRelationshipMetadata(leftItem, true); .getRelationshipMetadata(leftItem, true);
assertThat(leftList.size(), equalTo(2)); assertThat(leftList.size(), equalTo(3));
assertThat(leftList.get(0).getValue(), equalTo("familyName, firstName"));
assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("dc")); assertThat(leftList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("contributor")); assertThat(leftList.get(0).getMetadataField().getMetadataSchema().getName(),
assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("author")); equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(leftList.get(0).getMetadataField().getElement(), equalTo("isAuthorOfPublication"));
assertThat(leftList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(leftList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(leftList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); assertThat(leftList.get(1).getValue(), equalTo("familyName, firstName"));
assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), assertThat(leftList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("dc"));
equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("contributor"));
assertThat(leftList.get(1).getMetadataField().getElement(), equalTo("isAuthorOfPublication")); assertThat(leftList.get(1).getMetadataField().getQualifier(), equalTo("author"));
assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(leftList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(leftList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(leftList.get(2).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(leftList.get(2).getMetadataField().getElement(), equalTo("isAuthorOfPublication"));
assertThat(leftList.get(2).getMetadataField().getQualifier(), nullValue());
assertThat(leftList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID()));
// rightItem is the author // rightItem is the author
List<MetadataValue> rightRelationshipMetadataList = itemService List<MetadataValue> rightRelationshipMetadataList = itemService
.getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY); .getMetadata(rightItem, MetadataSchemaEnum.RELATION.getName(), "isPublicationOfAuthor", null, Item.ANY);
@@ -208,12 +218,21 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa
//request the virtual metadata of the publication //request the virtual metadata of the publication
List<RelationshipMetadataValue> rightList = relationshipMetadataService List<RelationshipMetadataValue> rightList = relationshipMetadataService
.getRelationshipMetadata(rightItem, true); .getRelationshipMetadata(rightItem, true);
assertThat(rightList.size(), equalTo(1)); assertThat(rightList.size(), equalTo(2));
assertThat(rightList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID()))); assertThat(rightList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID())));
assertThat(rightList.get(0).getMetadataField().getMetadataSchema().getName(), assertThat(rightList.get(0).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName())); equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(rightList.get(0).getMetadataField().getElement(), equalTo("isPublicationOfAuthor")); assertThat(rightList.get(0).getMetadataField().getElement(), equalTo("isPublicationOfAuthor"));
assertThat(rightList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(rightList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(rightList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(rightList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID())));
assertThat(rightList.get(1).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(rightList.get(1).getMetadataField().getElement(), equalTo("isPublicationOfAuthor"));
assertThat(rightList.get(1).getMetadataField().getQualifier(), nullValue());
assertThat(rightList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
} }
@Test @Test
@@ -380,34 +399,52 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa
//request the virtual metadata of the journal issue //request the virtual metadata of the journal issue
List<RelationshipMetadataValue> issueRelList = List<RelationshipMetadataValue> issueRelList =
relationshipMetadataService.getRelationshipMetadata(leftItem, true); relationshipMetadataService.getRelationshipMetadata(leftItem, true);
assertThat(issueRelList.size(), equalTo(2)); assertThat(issueRelList.size(), equalTo(3));
assertThat(issueRelList.get(0).getValue(), equalTo("30"));
assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume")); assertThat(issueRelList.get(0).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("volumeNumber")); assertThat(issueRelList.get(0).getMetadataField().getMetadataSchema().getName(),
assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(issueRelList.get(0).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue"));
assertThat(issueRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(issueRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(issueRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(issueRelList.get(1).getValue(), equalTo(String.valueOf(rightItem.getID()))); assertThat(issueRelList.get(1).getValue(), equalTo("30"));
assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), assertThat(issueRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationvolume"));
equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("volumeNumber"));
assertThat(issueRelList.get(1).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue")); assertThat(issueRelList.get(1).getMetadataField().getQualifier(), equalTo(null));
assertThat(issueRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(issueRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(issueRelList.get(2).getValue(), equalTo(String.valueOf(rightItem.getID())));
assertThat(issueRelList.get(2).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(issueRelList.get(2).getMetadataField().getElement(), equalTo("isJournalVolumeOfIssue"));
assertThat(issueRelList.get(2).getMetadataField().getQualifier(), nullValue());
assertThat(issueRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID()));
//request the virtual metadata of the journal volume //request the virtual metadata of the journal volume
List<RelationshipMetadataValue> volumeRelList = List<RelationshipMetadataValue> volumeRelList =
relationshipMetadataService.getRelationshipMetadata(rightItem, true); relationshipMetadataService.getRelationshipMetadata(rightItem, true);
assertThat(volumeRelList.size(), equalTo(2)); assertThat(volumeRelList.size(), equalTo(3));
assertThat(volumeRelList.get(0).getValue(), equalTo("2"));
assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue")); assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID())));
assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber")); assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(),
assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null)); equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume"));
assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID()))); assertThat(volumeRelList.get(1).getValue(), equalTo("2"));
assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue"));
equalTo(MetadataSchemaEnum.RELATION.getName())); assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber"));
assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume")); assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null));
assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID())); assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID())));
assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(),
equalTo(MetadataSchemaEnum.RELATION.getName()));
assertThat(volumeRelList.get(2).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume"));
assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), nullValue());
assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID()));
} }
@Test @Test
@@ -614,45 +651,6 @@ public class RelationshipMetadataServiceIT extends AbstractIntegrationTestWithDa
.size(), equalTo(1)); .size(), equalTo(1));
} }
@Test
public void testGetNextRightPlace() throws Exception {
assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(0));
initPublicationAuthor();
assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(1));
context.turnOffAuthorisationSystem();
Item secondItem = ItemBuilder.createItem(context, col).build();
RelationshipBuilder.createRelationshipBuilder(context, secondItem, rightItem,
isAuthorOfPublicationRelationshipType).build();
context.restoreAuthSystemState();
assertThat(relationshipService.findNextRightPlaceByRightItem(context, rightItem), equalTo(2));
}
@Test
public void testGetNextLeftPlace() throws Exception {
assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(0));
initPublicationAuthor();
assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(1));
context.turnOffAuthorisationSystem();
Item secondAuthor = ItemBuilder.createItem(context, col2)
.withPersonIdentifierFirstName("firstName")
.withPersonIdentifierLastName("familyName").build();
RelationshipBuilder.createRelationshipBuilder(context, leftItem, secondAuthor,
isAuthorOfPublicationRelationshipType).build();
context.restoreAuthSystemState();
assertThat(relationshipService.findNextLeftPlaceByLeftItem(context, leftItem), equalTo(2));
}
@Test @Test
public void testGetVirtualMetadata() throws SQLException, AuthorizeException { public void testGetVirtualMetadata() throws SQLException, AuthorizeException {
// Journal, JournalVolume, JournalIssue, Publication items, related to each other using the relationship types // Journal, JournalVolume, JournalIssue, Publication items, related to each other using the relationship types

View File

@@ -24,12 +24,14 @@ import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.mockito.InjectMocks; import org.mockito.InjectMocks;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner; import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class) @RunWith(MockitoJUnitRunner.class)
@@ -71,6 +73,9 @@ public class RelationshipServiceImplTest {
@Mock @Mock
private ConfigurationService configurationService; private ConfigurationService configurationService;
@Spy
private RelationshipVersioningUtils relationshipVersioningUtils;
@Before @Before
public void init() { public void init() {
relationshipsList = new ArrayList<>(); relationshipsList = new ArrayList<>();
@@ -112,9 +117,6 @@ public class RelationshipServiceImplTest {
relationshipTest.add(getRelationship(bob, cindy, hasMother,1,0)); relationshipTest.add(getRelationship(bob, cindy, hasMother,1,0));
when(relationshipService.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest); when(relationshipService.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest);
// Mock the state of objects utilized in findByItem() to meet the success criteria of the invocation
when(relationshipDAO.findByItem(context, cindy, -1, -1, false)).thenReturn(relationshipTest);
List<Relationship> results = relationshipService.findByItem(context, cindy); List<Relationship> results = relationshipService.findByItem(context, cindy);
assertEquals("TestFindByItem 0", relationshipTest, results); assertEquals("TestFindByItem 0", relationshipTest, results);
for (int i = 0; i < relationshipTest.size(); i++) { for (int i = 0; i < relationshipTest.size(); i++) {
@@ -122,32 +124,6 @@ public class RelationshipServiceImplTest {
} }
} }
@Test
public void testFindLeftPlaceByLeftItem() throws Exception {
// Declare objects utilized in unit test
Item item = mock(Item.class);
// Mock DAO to return mocked left place as 0
when(relationshipDAO.findNextLeftPlaceByLeftItem(context, item)).thenReturn(0);
// The left place reported from out mocked item should match the DAO's report of the left place
assertEquals("TestFindLeftPlaceByLeftItem 0", relationshipDAO.findNextLeftPlaceByLeftItem(context, item),
relationshipService.findNextLeftPlaceByLeftItem(context, item));
}
@Test
public void testFindRightPlaceByRightItem() throws Exception {
// Declare objects utilized in unit test
Item item = mock(Item.class);
// Mock lower level DAO to return mocked right place as 0
when(relationshipDAO.findNextRightPlaceByRightItem(context, item)).thenReturn(0);
// The right place reported from out mocked item should match the DAO's report of the right place
assertEquals("TestFindRightPlaceByRightItem 0", relationshipDAO.findNextRightPlaceByRightItem(context, item),
relationshipService.findNextRightPlaceByRightItem(context, item));
}
@Test @Test
public void testFindByItemAndRelationshipType() throws Exception { public void testFindByItemAndRelationshipType() throws Exception {
// Declare objects utilized in unit test // Declare objects utilized in unit test

View File

@@ -8,6 +8,7 @@
package org.dspace.content;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import java.util.List;
@@ -88,18 +89,27 @@ public class RightTiltedRelationshipMetadataServiceIT extends RelationshipMetada
//request the virtual metadata of the journal volume
List<RelationshipMetadataValue> volumeRelList =
relationshipMetadataService.getRelationshipMetadata(rightItem, true);
-assertThat(volumeRelList.size(), equalTo(2));
-assertThat(volumeRelList.get(0).getValue(), equalTo("2"));
-assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue"));
-assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("issueNumber"));
-assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo(null));
+assertThat(volumeRelList.size(), equalTo(3));
+assertThat(volumeRelList.get(0).getValue(), equalTo(String.valueOf(leftItem.getID())));
+assertThat(volumeRelList.get(0).getMetadataField().getMetadataSchema().getName(),
+equalTo(MetadataSchemaEnum.RELATION.getName()));
+assertThat(volumeRelList.get(0).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume"));
+assertThat(volumeRelList.get(0).getMetadataField().getQualifier(), equalTo("latestForDiscovery"));
assertThat(volumeRelList.get(0).getAuthority(), equalTo("virtual::" + relationship.getID()));
-assertThat(volumeRelList.get(1).getValue(), equalTo(String.valueOf(leftItem.getID())));
-assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(),
-equalTo(MetadataSchemaEnum.RELATION.getName()));
-assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume"));
+assertThat(volumeRelList.get(1).getValue(), equalTo("2"));
+assertThat(volumeRelList.get(1).getMetadataField().getMetadataSchema().getName(), equalTo("publicationissue"));
+assertThat(volumeRelList.get(1).getMetadataField().getElement(), equalTo("issueNumber"));
+assertThat(volumeRelList.get(1).getMetadataField().getQualifier(), equalTo(null));
assertThat(volumeRelList.get(1).getAuthority(), equalTo("virtual::" + relationship.getID()));
+assertThat(volumeRelList.get(2).getValue(), equalTo(String.valueOf(leftItem.getID())));
+assertThat(volumeRelList.get(2).getMetadataField().getMetadataSchema().getName(),
+equalTo(MetadataSchemaEnum.RELATION.getName()));
+assertThat(volumeRelList.get(2).getMetadataField().getElement(), equalTo("isIssueOfJournalVolume"));
+assertThat(volumeRelList.get(2).getMetadataField().getQualifier(), nullValue());
+assertThat(volumeRelList.get(2).getAuthority(), equalTo("virtual::" + relationship.getID()));
}
}

File diff suppressed because it is too large

View File

@@ -138,28 +138,6 @@ public class RelationshipDAOImplTest extends AbstractIntegrationTest {
-1, -1, false));
}
-/**
-* Test findNextLeftPlaceByLeftItem should return 0 given our test left Item itemOne.
-*
-* @throws Exception
-*/
-@Test
-public void testFindNextLeftPlaceByLeftItem() throws Exception {
-assertEquals("TestNextLeftPlaceByLeftItem 0", 1, relationshipService.findNextLeftPlaceByLeftItem(context,
-itemOne));
-}
-/**
-* Test findNextRightPlaceByRightItem should return 0 given our test right Item itemTwo.
-*
-* @throws Exception
-*/
-@Test
-public void testFindNextRightPlaceByRightItem() throws Exception {
-assertEquals("TestNextRightPlaceByRightItem 0", 1, relationshipService.findNextRightPlaceByRightItem(context,
-itemTwo));
-}
/**
* Test findByRelationshipType should return our defined relationshipsList given our test RelationshipType
* relationshipType

View File

@@ -0,0 +1,485 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.service;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.sql.SQLException;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.EntityTypeBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.builder.RelationshipBuilder;
import org.dspace.builder.RelationshipTypeBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.versioning.Version;
import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersioningService;
import org.junit.Before;
import org.junit.Test;
public class ItemServiceTest extends AbstractIntegrationTestWithDatabase {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemServiceTest.class);
protected RelationshipService relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
protected RelationshipTypeService relationshipTypeService = ContentServiceFactory.getInstance()
.getRelationshipTypeService();
protected EntityTypeService entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
protected CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService();
protected MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
protected VersioningService versioningService = VersionServiceFactory.getInstance().getVersionService();
Community community;
Collection collection1;
Item item;
String authorQualifier = "author";
String contributorElement = "contributor";
String dcSchema = "dc";
String subjectElement = "subject";
String descriptionElement = "description";
String abstractQualifier = "abstract";
/**
* This method will be run before every test as per @Before. It will
* initialize resources required for the tests.
*/
@Before
@Override
public void setUp() throws Exception {
super.setUp();
try {
context.turnOffAuthorisationSystem();
community = CommunityBuilder.createCommunity(context)
.build();
collection1 = CollectionBuilder.createCollection(context, community)
.withEntityType("Publication")
.build();
WorkspaceItem is = workspaceItemService.create(context, collection1, false);
item = installItemService.installItem(context, is);
context.restoreAuthSystemState();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
} catch (SQLException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
}
}
@Test
public void InsertAndMoveMetadataShiftPlaceTest() throws Exception {
context.turnOffAuthorisationSystem();
// Here we add the first set of metadata to the item
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three");
context.restoreAuthSystemState();
// The code below performs the mentioned assertions to ensure the place is correct
List<MetadataValue> list = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY);
assertThat(list.size(), equalTo(3));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2));
context.turnOffAuthorisationSystem();
// This is where we add metadata at place=1
itemService.addAndShiftRightMetadata(
context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1
);
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3));
// And move metadata from place=2 to place=0
itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0);
context.restoreAuthSystemState();
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3));
}
@Test
public void InsertAndMoveMetadataShiftPlaceTest_complex() throws Exception {
context.turnOffAuthorisationSystem();
// Here we add the first set of metadata to the item
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one");
// NOTE: dc.subject should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1");
// NOTE: dc.subject should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three");
// NOTE: dc.description.abstract should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1");
context.restoreAuthSystemState();
// The code below performs the mentioned assertions to ensure the place is correct
List<MetadataValue> list1 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY);
assertThat(list1.size(), equalTo(3));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list1.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2));
List<MetadataValue> list2 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list2.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1));
List<MetadataValue> list3 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list3.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0));
context.turnOffAuthorisationSystem();
// This is where we add metadata at place=1
itemService.addAndShiftRightMetadata(
context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1
);
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
List<MetadataValue> list4 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list4.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3));
List<MetadataValue> list5 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list5.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list5.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1));
List<MetadataValue> list6 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list6.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list6.get(0));
// And move metadata from place=2 to place=0
itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 2, 0);
context.restoreAuthSystemState();
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
List<MetadataValue> list7 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list7.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 0, list7.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 1, list7.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3));
List<MetadataValue> list8 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list8.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1));
List<MetadataValue> list9 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list9.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list9.get(0));
}
@Test
public void InsertAndMoveMetadataOnePlaceForwardTest() throws Exception {
context.turnOffAuthorisationSystem();
// Here we add the first set of metadata to the item
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three");
context.restoreAuthSystemState();
// The code below performs the mentioned assertions to ensure the place is correct
List<MetadataValue> list = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY);
assertThat(list.size(), equalTo(3));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list.get(2));
context.turnOffAuthorisationSystem();
// This is where we add metadata at place=1
itemService.addAndShiftRightMetadata(
context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1
);
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3));
// And move metadata from place=1 to place=2
itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2);
context.restoreAuthSystemState();
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
list = itemService.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list.get(3));
}
@Test
public void InsertAndMoveMetadataOnePlaceForwardTest_complex() throws Exception {
context.turnOffAuthorisationSystem();
// NOTE: dc.description.abstract should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, descriptionElement, abstractQualifier, null, "test, abs1");
// Here we add the first set of metadata to the item
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, one");
// NOTE: dc.subject should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub1");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, two");
itemService.addMetadata(context, item, dcSchema, contributorElement, authorQualifier, null, "test, three");
// NOTE: dc.subject should NOT affect dc.contributor.author
itemService.addMetadata(context, item, dcSchema, subjectElement, null, null, "test, sub2");
context.restoreAuthSystemState();
// The code below performs the mentioned assertions to ensure the place is correct
List<MetadataValue> list1 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY);
assertThat(list1.size(), equalTo(3));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list1.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list1.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 2, list1.get(2));
List<MetadataValue> list2 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list2.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list2.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list2.get(1));
List<MetadataValue> list3 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list3.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list3.get(0));
context.turnOffAuthorisationSystem();
// This is where we add metadata at place=1
itemService.addAndShiftRightMetadata(
context, item, dcSchema, contributorElement, authorQualifier, null, "test, four", null, -1, 1
);
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
List<MetadataValue> list4 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list4.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list4.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 1, list4.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 2, list4.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list4.get(3));
List<MetadataValue> list5 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list5.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list5.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list5.get(1));
List<MetadataValue> list6 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list6.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list6.get(0));
// And move metadata from place=1 to place=2
itemService.moveMetadata(context, item, dcSchema, contributorElement, authorQualifier, 1, 2);
context.restoreAuthSystemState();
// Here we retrieve the list of metadata again to perform the assertions on the places below as mentioned
List<MetadataValue> list7 = itemService
.getMetadata(item, dcSchema, contributorElement, authorQualifier, Item.ANY)
.stream()
.sorted(Comparator.comparingInt(MetadataValue::getPlace))
.collect(Collectors.toList());
assertThat(list7.size(), equalTo(4));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, one", null, 0, list7.get(0));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, two", null, 1, list7.get(1));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, four", null, 2, list7.get(2));
assertMetadataValue(authorQualifier, contributorElement, dcSchema, "test, three", null, 3, list7.get(3));
List<MetadataValue> list8 = itemService
.getMetadata(item, dcSchema, subjectElement, null, Item.ANY);
assertThat(list8.size(), equalTo(2));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub1", null, 0, list8.get(0));
assertMetadataValue(null, subjectElement, dcSchema, "test, sub2", null, 1, list8.get(1));
List<MetadataValue> list9 = itemService
.getMetadata(item, dcSchema, descriptionElement, abstractQualifier, Item.ANY);
assertThat(list9.size(), equalTo(1));
assertMetadataValue(abstractQualifier, descriptionElement, dcSchema, "test, abs1", null, 0, list9.get(0));
}
@Test
public void testDeleteItemWithMultipleVersions() throws Exception {
context.turnOffAuthorisationSystem();
EntityType publicationEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Publication")
.build();
EntityType personEntityType = EntityTypeBuilder.createEntityTypeBuilder(context, "Person")
.build();
RelationshipType isAuthorOfPublication = RelationshipTypeBuilder.createRelationshipTypeBuilder(
context, publicationEntityType, personEntityType, "isAuthorOfPublication", "isPublicationOfAuthor",
null, null, null, null
)
.withCopyToLeft(false)
.withCopyToRight(false)
.build();
Collection collection2 = CollectionBuilder.createCollection(context, community)
.withEntityType("Person")
.build();
Item publication1 = ItemBuilder.createItem(context, collection1)
.withTitle("publication 1")
// NOTE: entity type comes from collection
.build();
Item person1 = ItemBuilder.createItem(context, collection2)
.withTitle("person 2")
// NOTE: entity type comes from collection
.build();
RelationshipBuilder.createRelationshipBuilder(context, publication1, person1, isAuthorOfPublication);
// create a new version, which results in a non-latest relationship attached to person 1.
Version newVersion = versioningService.createNewVersion(context, publication1);
Item newPublication1 = newVersion.getItem();
WorkspaceItem newPublication1WSI = workspaceItemService.findByItem(context, newPublication1);
installItemService.installItem(context, newPublication1WSI);
context.dispatchEvents();
// verify person1 has a non-latest relationship, which should also be removed
List<Relationship> relationships1 = relationshipService.findByItem(context, person1, -1, -1, false, true);
assertEquals(1, relationships1.size());
List<Relationship> relationships2 = relationshipService.findByItem(context, person1, -1, -1, false, false);
assertEquals(2, relationships2.size());
itemService.delete(context, person1);
context.restoreAuthSystemState();
}
private void assertMetadataValue(String authorQualifier, String contributorElement, String dcSchema, String value,
String authority, int place, MetadataValue metadataValue) {
assertThat(metadataValue.getValue(), equalTo(value));
assertThat(metadataValue.getMetadataField().getMetadataSchema().getName(), equalTo(dcSchema));
assertThat(metadataValue.getMetadataField().getElement(), equalTo(contributorElement));
assertThat(metadataValue.getMetadataField().getQualifier(), equalTo(authorQualifier));
assertThat(metadataValue.getAuthority(), equalTo(authority));
assertThat(metadataValue.getPlace(), equalTo(place));
}
}

View File

@@ -8,13 +8,18 @@
package org.dspace.discovery;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase;
+import org.dspace.app.launcher.ScriptLauncher;
+import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder;
@@ -86,6 +91,12 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance()
.getMetadataAuthorityService();
+@Override
+public void setUp() throws Exception {
+super.setUp();
+configurationService.setProperty("solr-database-resync.time-until-reindex", 1);
+}
@Test
public void solrRecordsAfterDepositOrDeletionOfWorkspaceItemTest() throws Exception {
context.turnOffAuthorisationSystem();
@@ -371,7 +382,8 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
collectionService.delete(context, col1);
context.restoreAuthSystemState();
assertSearchQuery(IndexableCollection.TYPE, 2);
-assertSearchQuery(IndexableItem.TYPE, 2);
+// Deleted item contained within totalFound due to predb status (SolrDatabaseResyncCli takes care of this)
+assertSearchQuery(IndexableItem.TYPE, 2, 3, 0, -1);
}
@Test
@@ -453,6 +465,10 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1);
// check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2);
+// Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items
+performSolrDatabaseResyncScript();
// check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3
assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4);
// check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
@@ -639,10 +655,79 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
// check Item type with start=0 and limit=default,
// we expect: indexableObjects=3, totalFound=6 (3 stale objects here)
assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1);
-// as the previous query hit the stale objects running a new query should lead to a clean situation
+// Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items
+performSolrDatabaseResyncScript();
+// as SolrDatabaseResyncCli removed the stale objects, running a new query should lead to a clean situation
assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1);
}
@Test
public void iteratorSearchServiceTest() throws SearchServiceException {
String subject1 = "subject1";
String subject2 = "subject2";
int numberItemsSubject1 = 30;
int numberItemsSubject2 = 2;
Item[] itemsSubject1 = new Item[numberItemsSubject1];
Item[] itemsSubject2 = new Item[numberItemsSubject2];
context.turnOffAuthorisationSystem();
Community community = CommunityBuilder.createCommunity(context).build();
Collection collection = CollectionBuilder.createCollection(context, community).build();
for (int i = 0; i < numberItemsSubject1; i++) {
itemsSubject1[i] = ItemBuilder.createItem(context, collection)
.withTitle("item subject 1 number" + i)
.withSubject(subject1)
.build();
}
for (int i = 0; i < numberItemsSubject2; i++) {
itemsSubject2[i] = ItemBuilder.createItem(context, collection)
.withTitle("item subject 2 number " + i)
.withSubject(subject2)
.build();
}
Collection collection2 = CollectionBuilder.createCollection(context, community).build();
ItemBuilder.createItem(context, collection2)
.withTitle("item collection2")
.withSubject(subject1)
.build();
context.restoreAuthSystemState();
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.addFilterQueries("subject:" + subject1);
Iterator<Item> itemIterator =
searchService.iteratorSearch(context, new IndexableCollection(collection), discoverQuery);
int counter = 0;
List<Item> foundItems = new ArrayList<>();
while (itemIterator.hasNext()) {
foundItems.add(itemIterator.next());
counter++;
}
for (Item item : itemsSubject1) {
assertTrue(foundItems.contains(item));
}
assertEquals(numberItemsSubject1, counter);
discoverQuery = new DiscoverQuery();
discoverQuery.addFilterQueries("subject:" + subject2);
itemIterator = searchService.iteratorSearch(context, null, discoverQuery);
counter = 0;
foundItems = new ArrayList<>();
while (itemIterator.hasNext()) {
foundItems.add(itemIterator.next());
counter++;
}
assertEquals(numberItemsSubject2, counter);
for (Item item : itemsSubject2) {
assertTrue(foundItems.contains(item));
}
}
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1);
}
@@ -739,6 +824,13 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
context.setCurrentUser(previousUser);
}
public void performSolrDatabaseResyncScript() throws Exception {
String[] args = new String[] {"solr-database-resync"};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher
.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
}
private void abort(XmlWorkflowItem workflowItem)
throws SQLException, AuthorizeException, IOException, SearchServiceException {
final EPerson previousUser = context.getCurrentUser();

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
-package org.dspace.app.rest.utils;
+package org.dspace.discovery.utils;
import static java.util.Collections.emptyList;
import static org.dspace.discovery.configuration.DiscoveryConfigurationParameters.SORT.COUNT;
@@ -16,10 +16,10 @@ import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
@@ -35,9 +35,6 @@ import java.util.LinkedList;
import java.util.List;
import java.util.function.Function;
-import org.dspace.app.rest.exception.DSpaceBadRequestException;
-import org.dspace.app.rest.exception.InvalidSearchRequestException;
-import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField;
import org.dspace.discovery.DiscoverFilterQuery;
@@ -45,6 +42,7 @@ import org.dspace.discovery.DiscoverHitHighlightingField;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject;
+import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SolrServiceImpl;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
@@ -56,6 +54,7 @@ import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.configuration.HierarchicalSidebarFacetConfiguration;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexFactory;
+import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService;
import org.hamcrest.FeatureMatcher;
import org.hamcrest.Matcher;
@@ -65,8 +64,7 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
-import org.springframework.data.domain.PageRequest;
-import org.springframework.data.domain.Sort;
/**
* Unit tests for {@link DiscoverQueryBuilder}
@@ -94,8 +92,14 @@ public class DiscoverQueryBuilderTest {
private DiscoveryConfiguration discoveryConfiguration;
private String query;
-private SearchFilter searchFilter;
-private PageRequest page;
+private int pageSize = 10;
+private long offset = 10;
+private String sortProperty = "dc.title";
+private String sortDirection = "ASC";
+private QueryBuilderSearchFilter searchFilter;
@Before
public void setUp() throws Exception {
@@ -109,14 +113,16 @@ public class DiscoverQueryBuilderTest {
.then(invocation -> invocation.getArguments()[0] + "_sort");
when(searchService
-.getFacetYearRange(eq(context), nullable(IndexableObject.class), any(DiscoverySearchFilterFacet.class),
+.getFacetYearRange(eq(context), nullable(IndexableObject.class),
+any(DiscoverySearchFilterFacet.class),
any(), any(DiscoverQuery.class)))
.then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2]));
when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class),
any(DiscoveryConfiguration.class)))
.then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1],
-invocation.getArguments()[1] + ":\"" + invocation.getArguments()[3] + "\"",
+invocation.getArguments()[1] + ":\"" + invocation
+.getArguments()[3] + "\"",
(String) invocation.getArguments()[3]));
discoveryConfiguration = new DiscoveryConfiguration();
@@ -177,9 +183,8 @@ public class DiscoverQueryBuilderTest {
discoveryConfiguration.setSidebarFacets(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet));
discoveryConfiguration.setSearchFilters(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet));
+searchFilter = new QueryBuilderSearchFilter("subject", "equals", "Java");
query = "my test case";
-searchFilter = new SearchFilter("subject", "equals", "Java");
-page = PageRequest.of(1, 10, Sort.Direction.ASC, "dc.title");
queryBuilder.afterPropertiesSet();
}
@@ -188,7 +193,8 @@ public class DiscoverQueryBuilderTest {
public void testBuildQuery() throws Exception {
DiscoverQuery discoverQuery = queryBuilder
-.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "item", page);
+.buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
+"item", pageSize, offset, sortProperty, sortDirection);
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\""));
assertThat(discoverQuery.getQuery(), is(query));
@@ -214,10 +220,11 @@ public class DiscoverQueryBuilderTest {
@Test
public void testBuildQueryDefaults() throws Exception {
DiscoverQuery discoverQuery =
-queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null);
+queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null, null,
+null, null);
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true"));
-assertThat(discoverQuery.getQuery(), is(emptyOrNullString()));
+assertThat(discoverQuery.getQuery(), isEmptyOrNullString());
assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty()));
//Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid
// mock
@@ -241,13 +248,12 @@ public class DiscoverQueryBuilderTest {
@Test
public void testSortByScore() throws Exception {
-page = PageRequest.of(2, 10, Sort.Direction.ASC, "SCORE");
DiscoverQuery discoverQuery =
-queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page);
+queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), 10, 20L,
+"SCORE", "ASC");
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true"));
-assertThat(discoverQuery.getQuery(), is(emptyOrNullString()));
+assertThat(discoverQuery.getQuery(), isEmptyOrNullString());
assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty()));
//Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid
// mock
@@ -269,48 +275,50 @@ public class DiscoverQueryBuilderTest {
));
}
-@Test(expected = DSpaceBadRequestException.class)
+@Test(expected = IllegalArgumentException.class)
public void testInvalidDSOType() throws Exception {
queryBuilder
-.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "TEST", page);
+.buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
+"TEST", pageSize, offset, sortProperty, sortDirection);
}
-@Test(expected = InvalidSearchRequestException.class)
+@Test(expected = SearchServiceException.class)
public void testInvalidSortField() throws Exception {
-page = PageRequest.of(2, 10, Sort.Direction.ASC, "test");
queryBuilder
-.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page);
+.buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
+"ITEM", pageSize, 20L, "test", sortDirection);
}
-@Test(expected = DSpaceBadRequestException.class)
+@Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFilter1() throws Exception {
-searchFilter = new SearchFilter("test", "equals", "Smith, Donald");
+searchFilter = new QueryBuilderSearchFilter("test", "equals", "Smith, Donald");
queryBuilder
-.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page);
+.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM",
+pageSize, offset, sortProperty, sortDirection);
}
-@Test(expected = DSpaceBadRequestException.class)
+@Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFilter2() throws Exception {
when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class),
any(DiscoveryConfiguration.class)))
.thenThrow(SQLException.class);
queryBuilder
-.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page);
+.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM",
+pageSize, offset, sortProperty, sortDirection);
}
@Test
public void testBuildFacetQuery() throws Exception {
-DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration,
-"prefix", query,
-Arrays.asList(searchFilter), "item", page,
-"subject");
+DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, "prefix",
+query, Collections.singletonList(searchFilter),
+"item", pageSize, offset, "subject");
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\""));
assertThat(discoverQuery.getQuery(), is(query));
assertThat(discoverQuery.getDSpaceObjectFilters(), contains(IndexableItem.TYPE));
-assertThat(discoverQuery.getSortField(), is(emptyOrNullString()));
+assertThat(discoverQuery.getSortField(), isEmptyOrNullString());
assertThat(discoverQuery.getMaxResults(), is(0));
assertThat(discoverQuery.getStart(), is(0));
assertThat(discoverQuery.getFacetMinCount(), is(1));
@@ -321,10 +329,10 @@ public class DiscoverQueryBuilderTest {
));
}
-@Test(expected = DSpaceBadRequestException.class)
+@Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFacet() throws Exception {
queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, null, query,
-Arrays.asList(searchFilter), "item", page, "test");
+Collections.singletonList(searchFilter), "item", pageSize, offset, "test");
}
public Matcher<DiscoverFacetField> discoverFacetFieldMatcher(DiscoverFacetField expected) {
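The hunks above move the DiscoverQueryBuilder tests out of the REST layer and replace the Spring PageRequest with plain paging arguments. As a rough usage sketch of the relocated builder (not part of this commit; the wiring of queryBuilder, context and discoveryConfiguration is assumed, and the filter values are only illustrative):

// Minimal sketch, assuming a configured DiscoverQueryBuilder and an open DSpace Context.
import java.util.Collections;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;

public class DiscoverQueryBuilderUsageSketch {
    DiscoverQuery buildItemQuery(Context context, DiscoverQueryBuilder queryBuilder,
                                 DiscoveryConfiguration discoveryConfiguration) throws Exception {
        // Filter "subject equals Java", mirroring the fixture used in the tests above.
        QueryBuilderSearchFilter filter = new QueryBuilderSearchFilter("subject", "equals", "Java");
        // scope = null, dsoType = "item", pageSize = 10, offset = 0, sorted by dc.title ascending
        return queryBuilder.buildQuery(context, null, discoveryConfiguration, "my test case",
                Collections.singletonList(filter), "item", 10, 0L, "dc.title", "ASC");
    }
}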

View File

@@ -1,87 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.external.provider.impl;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.util.FileCopyUtils;
/**
* we override the init method to mock the rest call to pubmed the following
* mock definitions will allow to answer to efetch or esearch requests using the
* test resource files (pubmed-esearch.fcgi.xml or pubmed-efetch.fcgi.xml)
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class MockPubmedImportMetadataSourceServiceImpl extends PubmedImportMetadataSourceServiceImpl {
@Override
public void init() throws Exception {
pubmedWebTarget = Mockito.mock(WebTarget.class);
ArgumentCaptor<String> valueCapture = ArgumentCaptor.forClass(String.class);
when(pubmedWebTarget.queryParam(ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenAnswer(new Answer<WebTarget>() {
@Override
public WebTarget answer(InvocationOnMock invocation) throws Throwable {
return pubmedWebTarget;
}
});
when(pubmedWebTarget.path(valueCapture.capture())).thenAnswer(new Answer<WebTarget>() {
@Override
public WebTarget answer(InvocationOnMock invocation) throws Throwable {
return pubmedWebTarget;
}
});
when(pubmedWebTarget.request(ArgumentMatchers.any(MediaType.class)))
.thenAnswer(new Answer<Invocation.Builder>() {
@Override
public Invocation.Builder answer(InvocationOnMock invocation) throws Throwable {
Invocation.Builder builder = Mockito.mock(Invocation.Builder.class);
when(builder.get()).thenAnswer(new Answer<Response>() {
@Override
public Response answer(InvocationOnMock invocation) throws Throwable {
Response response = Mockito.mock(Response.class);
when(response.readEntity(ArgumentMatchers.eq(String.class))).then(new Answer<String>() {
@Override
public String answer(InvocationOnMock invocation) throws Throwable {
String resourceName = "pubmed-" + valueCapture.getValue() + ".xml";
InputStream resource = getClass().getResourceAsStream(resourceName);
try (Reader reader = new InputStreamReader(resource, StandardCharsets.UTF_8)) {
return FileCopyUtils.copyToString(reader);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
});
return response;
}
});
return builder;
};
});
}
}

View File

@@ -0,0 +1,90 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.process;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.junit.Test;
/**
* This class will aim to test Process related use cases
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
*/
public class ProcessIT extends AbstractIntegrationTestWithDatabase {
protected ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
@Test
public void checkProcessGroupsTest() throws Exception {
context.turnOffAuthorisationSystem();
Group groupA = GroupBuilder.createGroup(context)
.withName("Group A")
.addMember(admin)
.build();
Set<Group> groupSet = new HashSet<>();
groupSet.add(groupA);
Process processA = ProcessBuilder.createProcess(context, admin, "mock-script",
new LinkedList<>(),
groupSet).build();
context.restoreAuthSystemState();
Process process = processService.find(context, processA.getID());
List<Group> groups = process.getGroups();
boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupA.getID()));
assertTrue(isPresent);
}
@Test
public void removeOneGroupTest() throws Exception {
context.turnOffAuthorisationSystem();
Group groupA = GroupBuilder.createGroup(context)
.withName("Group A")
.addMember(admin).build();
Set<Group> groupSet = new HashSet<>();
groupSet.add(groupA);
UUID groupUuid = groupA.getID();
Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", new LinkedList<>(),
groupSet).build();
context.restoreAuthSystemState();
groupService.delete(context, groupA);
context.commit();
context.reloadEntity(groupA);
processA = context.reloadEntity(processA);
Process process = processService.find(context, processA.getID());
List<Group> groups = process.getGroups();
boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupUuid));
assertFalse(isPresent);
}
}

View File

@@ -0,0 +1,53 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import org.dspace.content.Item;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.RelationshipType;
import org.hamcrest.Matcher;
/**
* Methods for testing relationships and their behavior with versioned items.
*/
public class RelationshipVersioningTestUtils {
private RelationshipVersioningTestUtils() {}
public static Matcher<Object> isRel(
Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus,
int leftPlace, int rightPlace
) {
return isRel(leftItem, relationshipType, rightItem, latestVersionStatus, null, null, leftPlace, rightPlace);
}
public static Matcher<Object> isRel(
Item leftItem, RelationshipType relationshipType, Item rightItem, LatestVersionStatus latestVersionStatus,
String leftwardValue, String rightwardValue, int leftPlace, int rightPlace
) {
return allOf(
hasProperty("leftItem", is(leftItem)),
// NOTE: this is a painful one... class RelationshipType does not implement the equals method, so we cannot
// rely on object equality and have to compare ids instead. It has to be in capital letters,
// because the getter has been implemented inconsistently (#id vs #setId() vs #getID()).
hasProperty("relationshipType", hasProperty("ID", is(relationshipType.getID()))),
hasProperty("rightItem", is(rightItem)),
hasProperty("leftPlace", is(leftPlace)),
hasProperty("rightPlace", is(rightPlace)),
hasProperty("leftwardValue", leftwardValue == null ? nullValue() : is(leftwardValue)),
hasProperty("rightwardValue", rightwardValue == null ? nullValue() : is(rightwardValue)),
hasProperty("latestVersionStatus", is(latestVersionStatus))
);
}
}
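A brief usage sketch for the isRel() matcher (illustrative only, not part of this commit; the relationship list and the fixture items and relationship type are assumed to come from a versioning test):

// Minimal sketch showing how isRel() composes with Hamcrest in a versioning test.
import static org.dspace.util.RelationshipVersioningTestUtils.isRel;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import java.util.List;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.RelationshipType;

public class RelationshipVersioningAssertionSketch {
    void assertSingleLatestRelationship(List<Relationship> relationships, Item publication, Item author,
                                        RelationshipType isAuthorOfPublication) {
        // Expect exactly one relationship whose two ends are both latest item versions, placed first on each side.
        assertThat(relationships, containsInAnyOrder(
            isRel(publication, isAuthorOfPublication, author, LatestVersionStatus.BOTH, 0, 0)
        ));
    }
}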

View File

@@ -20,6 +20,7 @@ import org.dspace.app.rest.utils.DSpaceAPIRequestLoggingFilter;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer;
import org.dspace.app.sitemap.GenerateSitemaps;
+import org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli;
import org.dspace.app.util.DSpaceContextListener;
import org.dspace.utils.servlet.DSpaceWebappServletFilter;
import org.slf4j.Logger;
@@ -71,6 +72,11 @@ public class Application extends SpringBootServletInitializer {
GenerateSitemaps.generateSitemapsScheduled();
}
+@Scheduled(cron = "${solr-database-resync.cron:-}")
+public void solrDatabaseResync() throws Exception {
+SolrDatabaseResyncCli.runScheduled();
+}
/**
* Override the default SpringBootServletInitializer.configure() method,
* passing it this Application class.

Some files were not shown because too many files have changed in this diff