Merge remote-tracking branch 'bit-bucket/main-duracom-69' into main-duracom-69

This commit is contained in:
Vincenzo Mecca
2022-06-21 15:33:21 +02:00
203 changed files with 27486 additions and 1433 deletions

View File

@@ -550,10 +550,17 @@
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
</dependency>
<!-- Codebase at https://github.com/OCLC-Research/oaiharvester2/ -->
<dependency>
<groupId>org.dspace</groupId>
<artifactId>oclc-harvester2</artifactId>
</dependency>
<!-- Xalan is REQUIRED by 'oclc-harvester2' listed above (OAI harvesting fails without it).
Please do NOT use Xalan in the DSpace codebase, as it is not well maintained. -->
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId>

View File

@@ -0,0 +1,170 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
* Metadata exporter to allow the batch export of metadata from a discovery search into a file
*
*/
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {
private static final String EXPORT_CSV = "exportCSV";
private boolean help = false;
private String identifier;
private String discoveryConfigName;
private String[] filterQueryStrings;
private boolean hasScope = false;
private String query;
private SearchService searchService;
private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
private EPersonService ePersonService;
private DiscoveryConfigurationService discoveryConfigurationService;
private CommunityService communityService;
private CollectionService collectionService;
private DiscoverQueryBuilder queryBuilder;
@Override
public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
}
@Override
public void setup() throws ParseException {
searchService = SearchUtils.getSearchService();
metadataDSpaceCsvExportService = new DSpace().getServiceManager()
.getServiceByName(
MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
MetadataDSpaceCsvExportService.class
);
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
discoveryConfigurationService = SearchUtils.getConfigurationService();
communityService = ContentServiceFactory.getInstance().getCommunityService();
collectionService = ContentServiceFactory.getInstance().getCollectionService();
queryBuilder = SearchUtils.getQueryBuilder();
if (commandLine.hasOption('h')) {
help = true;
return;
}
if (commandLine.hasOption('q')) {
query = commandLine.getOptionValue('q');
}
if (commandLine.hasOption('s')) {
hasScope = true;
identifier = commandLine.getOptionValue('s');
}
if (commandLine.hasOption('c')) {
discoveryConfigName = commandLine.getOptionValue('c');
}
if (commandLine.hasOption('f')) {
filterQueryStrings = commandLine.getOptionValues('f');
}
}
@Override
public void internalRun() throws Exception {
if (help) {
loghelpinfo();
printHelp();
return;
}
handler.logDebug("starting search export");
IndexableObject dso = null;
Context context = new Context();
context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
if (hasScope) {
dso = resolveScope(context, identifier);
}
DiscoveryConfiguration discoveryConfiguration =
discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);
List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();
handler.logDebug("processing filter queries");
if (filterQueryStrings != null) {
for (String filterQueryString: filterQueryStrings) {
String field = filterQueryString.split(",", 2)[0];
String operator = filterQueryString.split("(,|=)", 3)[1];
String value = filterQueryString.split("=", 2)[1];
QueryBuilderSearchFilter queryBuilderSearchFilter =
new QueryBuilderSearchFilter(field, operator, value);
queryBuilderSearchFilters.add(queryBuilderSearchFilter);
}
}
handler.logDebug("building query");
DiscoverQuery discoverQuery =
queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
"Item", 10, Long.getLong("0"), null, SortOption.DESCENDING);
handler.logDebug("creating iterator");
Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
handler.logDebug("creating dspacecsv");
DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
handler.logDebug("writing to file " + getFileNameOrExportFile());
handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
context.restoreAuthSystemState();
context.complete();
}
protected void loghelpinfo() {
handler.logInfo("metadata-export");
}
protected String getFileNameOrExportFile() {
return "metadataExportSearch.csv";
}
public IndexableObject resolveScope(Context context, String id) throws SQLException {
UUID uuid = UUID.fromString(id);
IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
if (scopeObj.getIndexedObject() == null) {
scopeObj = new IndexableCollection(collectionService.find(context, uuid));
}
return scopeObj;
}
}
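
For illustration, a minimal sketch of how the filter strings handled by internalRun() above are split into field, operator and value; the class name and the sample string are assumptions used only to demonstrate the three split calls:

import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;

public class FilterStringParsingExample {
    public static void main(String[] args) {
        // Same syntax as the -f option: <:filter-name>,<:filter-operator>=<:filter-value>
        String filterQueryString = "title,contains=sample text";
        String field = filterQueryString.split(",", 2)[0];         // "title"
        String operator = filterQueryString.split("(,|=)", 3)[1];  // "contains"
        String value = filterQueryString.split("=", 2)[1];         // "sample text"
        QueryBuilderSearchFilter filter = new QueryBuilderSearchFilter(field, operator, value);
        System.out.println(field + " | " + operator + " | " + value);
    }
}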

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
* The cli version of the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchCli extends MetadataExportSearch {
@Override
protected String getFileNameOrExportFile() {
return commandLine.getOptionValue('n');
}
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
* This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
* configuration for the {@link MetadataExportSearchCli} script
*/
public class MetadataExportSearchCliScriptConfiguration
extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {
@Override
public Options getOptions() {
Options options = super.getOptions();
options.addOption("n", "filename", true, "the filename to export to");
return options;
}
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script
*/
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {
private Class<T> dspaceRunnableclass;
@Override
public Class<T> getDspaceRunnableClass() {
return dspaceRunnableclass;
}
@Override
public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
this.dspaceRunnableclass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
return true;
}
@Override
public Options getOptions() {
if (options == null) {
Options options = new Options();
options.addOption("q", "query", true,
"The discovery search string to will be used to match records. Not URL encoded");
options.getOption("q").setType(String.class);
options.addOption("s", "scope", true,
"UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
"limited");
options.getOption("s").setType(String.class);
options.addOption("c", "configuration", true,
"The name of a Discovery configuration that should be used by this search");
options.getOption("c").setType(String.class);
options.addOption("f", "filter", true,
"Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
"<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
"authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
options.getOption("f").setType(String.class);
options.addOption("h", "help", false, "help");
super.options = options;
}
return options;
}
}

View File

@@ -925,11 +925,10 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
rightItem = item;
}
// Create the relationship
int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
Relationship persistedRelationship = relationshipService.create(c, leftItem, rightItem,
foundRelationshipType, leftPlace, rightPlace);
// Create the relationship, appending to the end
Relationship persistedRelationship = relationshipService.create(
c, leftItem, rightItem, foundRelationshipType, -1, -1
);
relationshipService.update(c, persistedRelationship);
}

View File

@@ -403,10 +403,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
}
// Create the relationship
int leftPlace = relationshipService.findNextLeftPlaceByLeftItem(c, leftItem);
int rightPlace = relationshipService.findNextRightPlaceByRightItem(c, rightItem);
Relationship persistedRelationship = relationshipService.create(
c, leftItem, rightItem, foundRelationshipType, leftPlace, rightPlace);
Relationship persistedRelationship =
relationshipService.create(c, leftItem, rightItem, foundRelationshipType, -1, -1);
// relationshipService.update(c, persistedRelationship);
System.out.println("\tAdded relationship (type: " + relationshipType + ") from " +

View File

@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.dspace.content.Item;
import org.dspace.core.ReloadableEntity;
import org.dspace.eperson.EPerson;
/**
* Entity that stores the ORCID access token related to a given eperson or a given
* profile item.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
@Entity
@Table(name = "orcid_token")
public class OrcidToken implements ReloadableEntity<Integer> {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_token_id_seq")
@SequenceGenerator(name = "orcid_token_id_seq", sequenceName = "orcid_token_id_seq", allocationSize = 1)
private Integer id;
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "eperson_id")
protected EPerson ePerson;
@OneToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "profile_item_id")
private Item profileItem;
@Column(name = "access_token")
private String accessToken;
@Override
public Integer getID() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public EPerson getEPerson() {
return ePerson;
}
public void setEPerson(EPerson eperson) {
this.ePerson = eperson;
}
public Item getProfileItem() {
return profileItem;
}
public void setProfileItem(Item profileItem) {
this.profileItem = profileItem;
}
public String getAccessToken() {
return accessToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import org.dspace.app.orcid.exception.OrcidClientException;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.orcid.jaxb.model.v3.release.record.Person;
/**
* Interface for classes that allow contacting the ORCID registry.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public interface OrcidClient {
/**
* Exchange the authorization code for an ORCID iD and 3-legged access token.
* The authorization code expires upon use.
*
* @param code the authorization code
* @return the ORCID token
* @throws OrcidClientException if some error occurs during the exchange
*/
OrcidTokenResponseDTO getAccessToken(String code);
/**
* Retrieves a summary of the ORCID person related to the given orcid.
*
* @param accessToken the access token
* @param orcid the orcid id of the record to retrieve
* @return the Person
* @throws OrcidClientException if some error occurs during the search
*/
Person getPerson(String accessToken, String orcid);
}
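
A minimal usage sketch of the interface above, assuming an OrcidClient instance and an authorization code are already available; the class and variable names are illustrative only:

import org.dspace.app.orcid.client.OrcidClient;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.orcid.jaxb.model.v3.release.record.Person;

public class OrcidClientUsageSketch {
    // 'client' and 'authorizationCode' are assumed to be supplied by the caller.
    public static Person fetchAuthenticatedPerson(OrcidClient client, String authorizationCode) {
        // Exchange the one-time authorization code for an access token and the ORCID iD.
        OrcidTokenResponseDTO token = client.getAccessToken(authorizationCode);
        // Read the person summary of the authenticated ORCID iD with the 3-legged token;
        // both calls throw OrcidClientException on errors.
        return client.getPerson(token.getAccessToken(), token.getOrcid());
    }
}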

View File

@@ -0,0 +1,167 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import static org.apache.http.client.methods.RequestBuilder.get;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.dspace.app.orcid.exception.OrcidClientException;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.util.ThrowingSupplier;
import org.orcid.jaxb.model.v3.release.record.Person;
/**
* Implementation of {@link OrcidClient}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidClientImpl implements OrcidClient {
private final OrcidConfiguration orcidConfiguration;
private final ObjectMapper objectMapper;
public OrcidClientImpl(OrcidConfiguration orcidConfiguration) {
this.orcidConfiguration = orcidConfiguration;
this.objectMapper = new ObjectMapper();
}
@Override
public OrcidTokenResponseDTO getAccessToken(String code) {
List<NameValuePair> params = new ArrayList<NameValuePair>();
params.add(new BasicNameValuePair("code", code));
params.add(new BasicNameValuePair("grant_type", "authorization_code"));
params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId()));
params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret()));
HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl())
.addHeader("Content-Type", "application/x-www-form-urlencoded")
.addHeader("Accept", "application/json")
.setEntity(new UrlEncodedFormEntity(params, Charset.defaultCharset()))
.build();
return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class);
}
@Override
public Person getPerson(String accessToken, String orcid) {
HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/person");
return executeAndUnmarshall(httpUriRequest, false, Person.class);
}
private HttpUriRequest buildGetUriRequest(String accessToken, String relativePath) {
return get(orcidConfiguration.getApiUrl() + relativePath.trim())
.addHeader("Content-Type", "application/x-www-form-urlencoded")
.addHeader("Authorization", "Bearer " + accessToken)
.build();
}
private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) {
HttpClient client = HttpClientBuilder.create().build();
return executeAndReturns(() -> {
HttpResponse response = client.execute(httpUriRequest);
if (isNotSuccessfull(response)) {
throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response));
}
return objectMapper.readValue(response.getEntity().getContent(), clazz);
});
}
private <T> T executeAndUnmarshall(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull, Class<T> clazz) {
HttpClient client = HttpClientBuilder.create().build();
return executeAndReturns(() -> {
HttpResponse response = client.execute(httpUriRequest);
if (handleNotFoundAsNull && isNotFound(response)) {
return null;
}
if (isNotSuccessfull(response)) {
throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response));
}
return unmarshall(response.getEntity(), clazz);
});
}
private <T> T executeAndReturns(ThrowingSupplier<T, Exception> supplier) {
try {
return supplier.get();
} catch (OrcidClientException ex) {
throw ex;
} catch (Exception ex) {
throw new OrcidClientException(ex);
}
}
@SuppressWarnings("unchecked")
private <T> T unmarshall(HttpEntity entity, Class<T> clazz) throws Exception {
JAXBContext jaxbContext = JAXBContext.newInstance(clazz);
XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(entity.getContent());
Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
return (T) unmarshaller.unmarshal(xmlStreamReader);
}
private String formatErrorMessage(HttpResponse response) {
try {
return IOUtils.toString(response.getEntity().getContent(), Charset.defaultCharset());
} catch (UnsupportedOperationException | IOException e) {
return "Generic error";
}
}
private boolean isNotSuccessfull(HttpResponse response) {
int statusCode = getStatusCode(response);
return statusCode < 200 || statusCode > 299;
}
private boolean isNotFound(HttpResponse response) {
return getStatusCode(response) == HttpStatus.SC_NOT_FOUND;
}
private int getStatusCode(HttpResponse response) {
return response.getStatusLine().getStatusCode();
}
}

View File

@@ -0,0 +1,90 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import org.apache.commons.lang3.StringUtils;
/**
* A class that contains all the configurations related to ORCID.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public final class OrcidConfiguration {
private String apiUrl;
private String redirectUrl;
private String clientId;
private String clientSecret;
private String tokenEndpointUrl;
private String authorizeEndpointUrl;
private String scopes;
public String getApiUrl() {
return apiUrl;
}
public void setApiUrl(String apiUrl) {
this.apiUrl = apiUrl;
}
public String getRedirectUrl() {
return redirectUrl;
}
public void setRedirectUrl(String redirectUrl) {
this.redirectUrl = redirectUrl;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public String getClientSecret() {
return clientSecret;
}
public void setClientSecret(String clientSecret) {
this.clientSecret = clientSecret;
}
public String getTokenEndpointUrl() {
return tokenEndpointUrl;
}
public void setTokenEndpointUrl(String tokenEndpointUrl) {
this.tokenEndpointUrl = tokenEndpointUrl;
}
public String getAuthorizeEndpointUrl() {
return authorizeEndpointUrl;
}
public void setAuthorizeEndpointUrl(String authorizeEndpointUrl) {
this.authorizeEndpointUrl = authorizeEndpointUrl;
}
public void setScopes(String scopes) {
this.scopes = scopes;
}
public String[] getScopes() {
return StringUtils.isNotBlank(scopes) ? StringUtils.split(scopes, ",") : new String[] {};
}
}

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.dao;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import org.dspace.eperson.EPerson;
/**
* Database Access Object interface class for the OrcidToken object. The
* implementation of this class is responsible for all database calls for the
* OrcidToken object and is autowired by Spring. This class should only be
* accessed from a single service and should never be exposed outside of the API.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public interface OrcidTokenDAO extends GenericDAO<OrcidToken> {
/**
* Find an OrcidToken by ePerson.
*
* @param context the DSpace context
* @param ePerson the ePerson to search for
* @return the Orcid token, if any
*/
public OrcidToken findByEPerson(Context context, EPerson ePerson);
/**
* Find an OrcidToken by profileItem.
*
* @param context the DSpace context
* @param profileItem the profile item to search for
* @return the Orcid token, if any
*/
public OrcidToken findByProfileItem(Context context, Item profileItem);
}

View File

@@ -0,0 +1,50 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.dao.impl;
import java.sql.SQLException;
import javax.persistence.Query;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.dao.OrcidTokenDAO;
import org.dspace.content.Item;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
* Implementation of {@link OrcidTokenDAO}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidTokenDAOImpl extends AbstractHibernateDAO<OrcidToken> implements OrcidTokenDAO {
@Override
public OrcidToken findByEPerson(Context context, EPerson ePerson) {
try {
Query query = createQuery(context, "FROM OrcidToken WHERE ePerson = :ePerson");
query.setParameter("ePerson", ePerson);
return singleResult(query);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public OrcidToken findByProfileItem(Context context, Item profileItem) {
try {
Query query = createQuery(context, "FROM OrcidToken WHERE profileItem = :profileItem");
query.setParameter("profileItem", profileItem);
return singleResult(query);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,48 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.exception;
/**
* Exception thrown by classes that implement {@link OrcidClient} in case of an
* error response from the ORCID registry.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidClientException extends RuntimeException {
public static final String INVALID_GRANT_MESSAGE = "invalid_grant";
private static final long serialVersionUID = -7618061110212398216L;
private int status = 0;
public OrcidClientException(int status, String content) {
super(content);
this.status = status;
}
public OrcidClientException(Throwable cause) {
super(cause);
}
public int getStatus() {
return this.status;
}
/**
* Returns true if the exception is related to an invalid grant error
* (the authorization code is not valid), false otherwise.
*
* @return the check result
*/
public boolean isInvalidGrantException() {
return getMessage() != null && getMessage().contains(INVALID_GRANT_MESSAGE);
}
}

View File

@@ -0,0 +1,28 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.model;
/**
* The types of activities defined on ORCID that can be synchronized.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidEntityType {
/**
* The publication/work activity.
*/
PUBLICATION,
/**
* The funding activity.
*/
FUNDING;
}

View File

@@ -0,0 +1,135 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.StringUtils;
/**
* This class maps the response from an ORCID token endpoint.
*
* Response example:
*
* {
* "access_token":"f5af9f51-07e6-4332-8f1a-c0c11c1e3728",
* "token_type":"bearer",
* "refresh_token":"f725f747-3a65-49f6-a231-3e8944ce464d",
* "expires_in":631138518,
* "scope":"/read-limited",
* "name":"Sofia Garcia",
* "orcid":"0000-0001-2345-6789"
* }
*
* @author Luca Giamminonni (luca.giamminonni at 4Science.it)
*
*/
public class OrcidTokenResponseDTO {
/**
* The access token released by the authorization server. This is the most
* relevant item, because it allows the client to access the user's resources as
* defined in the scopes.
*/
@JsonProperty("access_token")
private String accessToken;
/**
* The refresh token as defined in the OAuth standard.
*/
@JsonProperty("refresh_token")
private String refreshToken;
/**
* It will be "bearer".
*/
@JsonProperty("token_type")
private String tokenType;
/**
* The token lifetime in seconds.
*/
@JsonProperty("expires_in")
private int expiresIn;
/**
* List of scopes.
*/
private String scope;
/**
* The ORCID user name.
*/
private String name;
/**
* The ORCID user id.
*/
private String orcid;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getOrcid() {
return orcid;
}
public void setOrcid(String orcid) {
this.orcid = orcid;
}
public String getAccessToken() {
return accessToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public String getRefreshToken() {
return refreshToken;
}
public void setRefreshToken(String refreshToken) {
this.refreshToken = refreshToken;
}
public String getTokenType() {
return tokenType;
}
public void setTokenType(String tokenType) {
this.tokenType = tokenType;
}
public int getExpiresIn() {
return expiresIn;
}
public void setExpiresIn(int expiresIn) {
this.expiresIn = expiresIn;
}
public String getScope() {
return scope;
}
public void setScope(String scope) {
this.scope = scope;
}
@JsonIgnore
public String[] getScopeAsArray() {
return StringUtils.isEmpty(getScope()) ? new String[] {} : getScope().split(" ");
}
}
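
As a quick illustration, the example response in the class comment above binds to this DTO with a default Jackson ObjectMapper; this is only a sketch showing how the annotated fields line up with the JSON keys:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;

public class OrcidTokenResponseDtoSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"access_token\":\"f5af9f51-07e6-4332-8f1a-c0c11c1e3728\","
            + "\"token_type\":\"bearer\","
            + "\"refresh_token\":\"f725f747-3a65-49f6-a231-3e8944ce464d\","
            + "\"expires_in\":631138518,"
            + "\"scope\":\"/read-limited\","
            + "\"name\":\"Sofia Garcia\","
            + "\"orcid\":\"0000-0001-2345-6789\"}";
        // The @JsonProperty annotations map the snake_case keys; scope, name and orcid bind by field name.
        OrcidTokenResponseDTO dto = new ObjectMapper().readValue(json, OrcidTokenResponseDTO.class);
        System.out.println(dto.getOrcid() + " -> " + dto.getAccessToken());
        // getScopeAsArray() splits the space-separated scope string: a single element here.
        System.out.println(dto.getScopeAsArray().length);
    }
}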

View File

@@ -0,0 +1,147 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service;
import java.sql.SQLException;
import java.util.List;
import java.util.Optional;
import org.dspace.app.orcid.model.OrcidEntityType;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidProfileDisconnectionMode;
import org.dspace.app.profile.OrcidProfileSyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Service that handles the synchronization between a DSpace profile and the
* related ORCID profile, if any.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*/
public interface OrcidSynchronizationService {
/**
* Check if the given item is linked to an ORCID profile.
*
* @param context the relevant DSpace Context.
* @param item the item to check
* @return true if the given item is linked to ORCID
*/
boolean isLinkedToOrcid(Context context, Item item);
/**
* Configure the given profile with the data present in the given ORCID token.
* This action is required to synchronize profile and related entities with
* ORCID. No security check is done; it is therefore the caller's responsibility
* to verify, for example, that the current user has permission to connect the
* profile to ORCID (if necessary).
*
* @param context the relevant DSpace Context.
* @param profile the profile to configure
* @param token the ORCID token
* @throws SQLException if a SQL error occurs during the profile update
*/
public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException;
/**
* Disconnect the given profile from ORCID.
*
* @param context the relevant DSpace Context.
* @param profile the profile to disconnect
* @throws SQLException if a SQL error occurs during the profile update
*/
public void unlinkProfile(Context context, Item profile) throws SQLException;
/**
* Set the synchronization preference for the given profile related to the given
* ORCID entity type.
*
* @param context the relevant DSpace Context.
* @param profile the researcher profile to update
* @param entityType the orcid entity type
* @param value the new synchronization preference value
* @return true if the value has actually been updated,
* false if the value to be set is the same as
* the one already configured
* @throws SQLException if a SQL error occurs during the profile
* update
* @throws IllegalArgumentException if the given researcher profile is not linked
* with an ORCID account
*/
public boolean setEntityPreference(Context context, Item profile, OrcidEntityType entityType,
OrcidEntitySyncPreference value) throws SQLException;
/**
* Update the profile's synchronization preference for the given profile.
*
* @param context the relevant DSpace Context.
* @param profile the researcher profile to update
* @param values the new synchronization preference values
* @return true if the value has actually been updated,
* false if the value to be set is the same as
* the one already configured
* @throws SQLException if a SQL error occurs during the profile
* update
* @throws IllegalArgumentException if the given researcher profile is not linked
* with an ORCID account
*/
public boolean setProfilePreference(Context context, Item profile,
List<OrcidProfileSyncPreference> values) throws SQLException;
/**
* Set the ORCID synchronization mode for the given profile.
*
* @param context the relevant DSpace Context.
* @param profile the researcher profile to update
* @param value the new synchronization mode value
* @return true if the value has actually been updated, false if
* the value to be set is the same as the one already
* configured
* @throws SQLException if a SQL error occurs during the profile update
*/
public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value)
throws SQLException;
/**
* Returns the ORCID synchronization mode configured for the given profile item.
*
* @param profile the researcher profile item
* @return the synchronization mode
*/
Optional<OrcidSynchronizationMode> getSynchronizationMode(Item profile);
/**
* Returns the ORCID synchronization preference related to the given entity type
* configured for the given profile item.
*
* @param profile the researcher profile item
* @param entityType the orcid entity type
* @return the configured preference
*/
Optional<OrcidEntitySyncPreference> getEntityPreference(Item profile, OrcidEntityType entityType);
/**
* Returns the ORCID synchronization preferences related to the profile itself
* configured for the given profile item.
*
* @param profile the researcher profile item
* @return the configured profile synchronization preferences
*/
List<OrcidProfileSyncPreference> getProfilePreferences(Item profile);
/**
* Returns the configured ORCID profile disconnection mode. If that mode is
* not configured or the configuration is wrong, the value DISABLED is returned.
*
* @return the disconnection mode
*/
OrcidProfileDisconnectionMode getDisconnectionMode();
}
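
A sketch of a typical call sequence against this service, assuming the service, the context, the profile item and a freshly obtained token are provided by the caller; the class and method names below are illustrative only:

import java.sql.SQLException;
import org.dspace.app.orcid.model.OrcidEntityType;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.content.Item;
import org.dspace.core.Context;

public class OrcidSynchronizationUsageSketch {
    public static void linkAndConfigure(OrcidSynchronizationService service, Context context,
                                        Item profile, OrcidTokenResponseDTO token) throws SQLException {
        // Store the ORCID iD, the granted scopes and the access token on the profile.
        service.linkProfile(context, profile, token);
        // Preferences can only be set on a linked profile, otherwise IllegalArgumentException is thrown.
        if (service.isLinkedToOrcid(context, profile)) {
            service.setSynchronizationMode(context, profile, OrcidSynchronizationMode.MANUAL);
            service.setEntityPreference(context, profile, OrcidEntityType.PUBLICATION,
                OrcidEntitySyncPreference.ALL);
        }
    }
}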

View File

@@ -0,0 +1,92 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
* Service that handles {@link OrcidToken} entities.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public interface OrcidTokenService {
/**
* Creates a new OrcidToken entity for the given ePerson and accessToken.
*
* @param context the DSpace context
* @param ePerson the EPerson
* @param accessToken the access token
* @return the created entity instance
*/
public OrcidToken create(Context context, EPerson ePerson, String accessToken);
/**
* Creates a new OrcidToken entity for the given ePerson and accessToken.
*
* @param context the DSpace context
* @param ePerson the EPerson
* @param profileItem the profile item
* @param accessToken the access token
* @return the created entity instance
*/
public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken);
/**
* Find an OrcidToken by ePerson.
*
* @param context the DSpace context
* @param ePerson the ePerson to search for
* @return the Orcid token, if any
*/
public OrcidToken findByEPerson(Context context, EPerson ePerson);
/**
* Find an OrcidToken by profileItem.
*
* @param context the DSpace context
* @param profileItem the profile item to search for
* @return the Orcid token, if any
*/
public OrcidToken findByProfileItem(Context context, Item profileItem);
/**
* Delete the given ORCID token entity.
*
* @param context the DSpace context
* @param orcidToken the ORCID token entity to delete
*/
public void delete(Context context, OrcidToken orcidToken);
/**
* Delete all the ORCID token entities.
*
* @param context the DSpace context
*/
public void deleteAll(Context context);
/**
* Deletes the ORCID token entity related to the given EPerson.
*
* @param context the DSpace context
* @param ePerson the ePerson for the deletion
*/
public void deleteByEPerson(Context context, EPerson ePerson);
/**
* Deletes the ORCID token entity related to the given profile item.
*
* @param context the DSpace context
* @param profileItem the item for the deletion
*/
public void deleteByProfileItem(Context context, Item profileItem);
}

View File

@@ -0,0 +1,273 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service.impl;
import static java.time.LocalDateTime.now;
import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;
import static java.util.List.of;
import static java.util.Optional.ofNullable;
import static org.apache.commons.lang3.EnumUtils.isValidEnum;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.content.Item.ANY;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.codec.binary.StringUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.model.OrcidEntityType;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidProfileDisconnectionMode;
import org.dspace.app.profile.OrcidProfileSyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation of {@link OrcidSynchronizationService}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService {
@Autowired
private ItemService itemService;
@Autowired
private ConfigurationService configurationService;
@Autowired
private EPersonService ePersonService;
@Autowired
private OrcidTokenService orcidTokenService;
@Override
public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException {
EPerson ePerson = ePersonService.findByProfileItem(context, profile);
if (ePerson == null) {
throw new IllegalArgumentException(
"The given profile item is not related to any eperson. Item id: " + profile.getID());
}
String orcid = token.getOrcid();
String accessToken = token.getAccessToken();
String[] scopes = token.getScopeAsArray();
itemService.setMetadataSingleValue(context, profile, "person", "identifier", "orcid", null, orcid);
itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
for (String scope : scopes) {
itemService.addMetadata(context, profile, "dspace", "orcid", "scope", null, scope);
}
if (isBlank(itemService.getMetadataFirstValue(profile, "dspace", "orcid", "authenticated", Item.ANY))) {
String currentDate = ISO_DATE_TIME.format(now());
itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "authenticated", null, currentDate);
}
setAccessToken(context, profile, ePerson, accessToken);
EPerson ePersonByOrcid = ePersonService.findByNetid(context, orcid);
if (ePersonByOrcid == null && isBlank(ePerson.getNetid())) {
ePerson.setNetid(orcid);
updateEPerson(context, ePerson);
}
updateItem(context, profile);
}
@Override
public void unlinkProfile(Context context, Item profile) throws SQLException {
itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
orcidTokenService.deleteByProfileItem(context, profile);
updateItem(context, profile);
}
@Override
public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type,
OrcidEntitySyncPreference value) throws SQLException {
String metadataQualifier = "sync-" + type.name().toLowerCase() + "s";
return updatePreferenceForSynchronizingWithOrcid(context, profile, metadataQualifier, of(value.name()));
}
@Override
public boolean setProfilePreference(Context context, Item profile, List<OrcidProfileSyncPreference> values)
throws SQLException {
List<String> valuesAsString = values.stream()
.map(OrcidProfileSyncPreference::name)
.collect(Collectors.toList());
return updatePreferenceForSynchronizingWithOrcid(context, profile, "sync-profile", valuesAsString);
}
@Override
public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value)
throws SQLException {
if (!isLinkedToOrcid(context, profile)) {
throw new IllegalArgumentException("The given profile cannot be configured for the ORCID "
+ "synchronization because it is not linked to any ORCID account: "
+ profile.getID());
}
String newValue = value.name();
String oldValue = itemService.getMetadataFirstValue(profile, "dspace", "orcid", "sync-mode", Item.ANY);
if (StringUtils.equals(oldValue, newValue)) {
return false;
} else {
itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "sync-mode", null, value.name());
return true;
}
}
@Override
public Optional<OrcidSynchronizationMode> getSynchronizationMode(Item item) {
return getMetadataValue(item, "dspace.orcid.sync-mode")
.map(metadataValue -> metadataValue.getValue())
.filter(value -> isValidEnum(OrcidSynchronizationMode.class, value))
.map(value -> OrcidSynchronizationMode.valueOf(value));
}
@Override
public Optional<OrcidEntitySyncPreference> getEntityPreference(Item item, OrcidEntityType entityType) {
return getMetadataValue(item, "dspace.orcid.sync-" + entityType.name().toLowerCase() + "s")
.map(metadataValue -> metadataValue.getValue())
.filter(value -> isValidEnum(OrcidEntitySyncPreference.class, value))
.map(value -> OrcidEntitySyncPreference.valueOf(value));
}
@Override
public List<OrcidProfileSyncPreference> getProfilePreferences(Item item) {
return getMetadataValues(item, "dspace.orcid.sync-profile")
.map(MetadataValue::getValue)
.filter(value -> isValidEnum(OrcidProfileSyncPreference.class, value))
.map(value -> OrcidProfileSyncPreference.valueOf(value))
.collect(Collectors.toList());
}
@Override
public boolean isLinkedToOrcid(Context context, Item item) {
return getOrcidAccessToken(context, item).isPresent() && getOrcid(item).isPresent();
}
@Override
public OrcidProfileDisconnectionMode getDisconnectionMode() {
String value = configurationService.getProperty("orcid.disconnection.allowed-users");
if (!OrcidProfileDisconnectionMode.isValid(value)) {
return OrcidProfileDisconnectionMode.DISABLED;
}
return OrcidProfileDisconnectionMode.fromString(value);
}
private void setAccessToken(Context context, Item profile, EPerson ePerson, String accessToken) {
OrcidToken orcidToken = orcidTokenService.findByEPerson(context, ePerson);
if (orcidToken == null) {
orcidTokenService.create(context, ePerson, profile, accessToken);
} else {
orcidToken.setProfileItem(profile);
orcidToken.setAccessToken(accessToken);
}
}
private boolean updatePreferenceForSynchronizingWithOrcid(Context context, Item profile,
String metadataQualifier,
List<String> values) throws SQLException {
if (!isLinkedToOrcid(context, profile)) {
throw new IllegalArgumentException("The given profile cannot be configured for the ORCID "
+ "synchronization because it is not linked to any ORCID account: "
+ profile.getID());
}
List<String> oldValues = itemService.getMetadata(profile, "dspace", "orcid", metadataQualifier, ANY).stream()
.map(metadataValue -> metadataValue.getValue())
.collect(Collectors.toList());
if (containsSameValues(oldValues, values)) {
return false;
}
itemService.clearMetadata(context, profile, "dspace", "orcid", metadataQualifier, ANY);
for (String value : values) {
itemService.addMetadata(context, profile, "dspace", "orcid", metadataQualifier, null, value);
}
return true;
}
private boolean containsSameValues(List<String> firstList, List<String> secondList) {
return new HashSet<>(firstList).equals(new HashSet<>(secondList));
}
private Optional<String> getOrcidAccessToken(Context context, Item item) {
return ofNullable(orcidTokenService.findByProfileItem(context, item))
.map(orcidToken -> orcidToken.getAccessToken());
}
public Optional<String> getOrcid(Item item) {
return getMetadataValue(item, "person.identifier.orcid")
.map(metadataValue -> metadataValue.getValue());
}
private Optional<MetadataValue> getMetadataValue(Item item, String metadataField) {
return getMetadataValues(item, metadataField).findFirst();
}
private Stream<MetadataValue> getMetadataValues(Item item, String metadataField) {
return item.getMetadata().stream()
.filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.')));
}
private void updateItem(Context context, Item item) throws SQLException {
try {
context.turnOffAuthorisationSystem();
itemService.update(context, item);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
} finally {
context.restoreAuthSystemState();
}
}
private void updateEPerson(Context context, EPerson ePerson) throws SQLException {
try {
ePersonService.update(context, ePerson);
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,99 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service.impl;
import java.sql.SQLException;
import java.util.List;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.dao.OrcidTokenDAO;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation of {@link OrcidTokenService}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidTokenServiceImpl implements OrcidTokenService {
@Autowired
private OrcidTokenDAO orcidTokenDAO;
@Override
public OrcidToken create(Context context, EPerson ePerson, String accessToken) {
return create(context, ePerson, null, accessToken);
}
@Override
public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken) {
OrcidToken orcidToken = new OrcidToken();
orcidToken.setAccessToken(accessToken);
orcidToken.setEPerson(ePerson);
orcidToken.setProfileItem(profileItem);
try {
return orcidTokenDAO.create(context, orcidToken);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public OrcidToken findByEPerson(Context context, EPerson ePerson) {
return orcidTokenDAO.findByEPerson(context, ePerson);
}
@Override
public OrcidToken findByProfileItem(Context context, Item profileItem) {
return orcidTokenDAO.findByProfileItem(context, profileItem);
}
@Override
public void delete(Context context, OrcidToken orcidToken) {
try {
orcidTokenDAO.delete(context, orcidToken);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public void deleteAll(Context context) {
try {
List<OrcidToken> tokens = orcidTokenDAO.findAll(context, OrcidToken.class);
for (OrcidToken token : tokens) {
delete(context, token);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public void deleteByEPerson(Context context, EPerson ePerson) {
OrcidToken orcidToken = findByEPerson(context, ePerson);
if (orcidToken != null) {
delete(context, orcidToken);
}
}
@Override
public void deleteByProfileItem(Context context, Item profileItem) {
OrcidToken orcidToken = findByProfileItem(context, profileItem);
if (orcidToken != null) {
delete(context, orcidToken);
}
}
}

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
* Enum that models the allowed values used to configure the ORCID synchronization
* preferences.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidEntitySyncPreference {
/**
* Preference to be set to disable the synchronization with ORCID of the
* specific entity.
*/
DISABLED,
/**
* Preference to be set to enable the synchronization with ORCID of all items
* relating to the specific entity.
*/
ALL
}

View File

@@ -0,0 +1,97 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static java.time.LocalDateTime.now;
import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;
import static org.apache.commons.collections.CollectionUtils.isNotEmpty;
import static org.dspace.content.Item.ANY;
import java.sql.SQLException;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.service.AfterResearcherProfileCreationAction;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
/**
* Implementation of {@link AfterResearcherProfileCreationAction} that copies the
* ORCID metadata, if any, from the owner to the researcher profile item.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
@Order(Ordered.HIGHEST_PRECEDENCE)
public class OrcidMetadataCopyingAction implements AfterResearcherProfileCreationAction {
@Autowired
private ItemService itemService;
@Autowired
private EPersonService ePersonService;
@Autowired
private OrcidTokenService orcidTokenService;
@Override
public void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException {
Item item = researcherProfile.getItem();
copyMetadataValues(context, owner, "eperson.orcid", item, "person.identifier.orcid");
copyMetadataValues(context, owner, "eperson.orcid.scope", item, "dspace.orcid.scope");
OrcidToken orcidToken = orcidTokenService.findByEPerson(context, owner);
if (orcidToken != null) {
orcidToken.setProfileItem(item);
}
if (isLinkedToOrcid(owner, orcidToken)) {
String currentDate = ISO_DATE_TIME.format(now());
itemService.setMetadataSingleValue(context, item, "dspace", "orcid", "authenticated", null, currentDate);
}
}
private void copyMetadataValues(Context context, EPerson ePerson, String ePersonMetadataField, Item item,
String itemMetadataField) throws SQLException {
List<String> values = getMetadataValues(ePerson, ePersonMetadataField);
if (CollectionUtils.isEmpty(values)) {
return;
}
MetadataFieldName metadata = new MetadataFieldName(itemMetadataField);
itemService.clearMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, ANY);
itemService.addMetadata(context, item, metadata.schema, metadata.element, metadata.qualifier, null, values);
}
private boolean isLinkedToOrcid(EPerson ePerson, OrcidToken orcidToken) {
return isNotEmpty(getMetadataValues(ePerson, "eperson.orcid")) && orcidToken != null;
}
private List<String> getMetadataValues(EPerson ePerson, String metadataField) {
return ePersonService.getMetadataByMetadataString(ePerson, metadataField).stream()
.map(MetadataValue::getValue)
.collect(Collectors.toList());
}
}

View File

@@ -0,0 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static org.apache.commons.lang3.EnumUtils.isValidEnum;
/**
* Enum that models all the available values of the property that
* determines which users can disconnect a profile from an ORCID account.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidProfileDisconnectionMode {
/**
* The disconnection is disabled.
*/
DISABLED,
/**
* Only the profile's owner can disconnect that profile from ORCID.
*/
ONLY_OWNER,
/**
* Only the admins can disconnect profiles from ORCID.
*/
ONLY_ADMIN,
/**
* Only the admin or the profile's owner can disconnect that profile from ORCID.
*/
ADMIN_AND_OWNER;
public static boolean isValid(String mode) {
return mode != null ? isValidEnum(OrcidProfileDisconnectionMode.class, mode.toUpperCase()) : false;
}
public static OrcidProfileDisconnectionMode fromString(String mode) {
return isValid(mode) ? OrcidProfileDisconnectionMode.valueOf(mode.toUpperCase()) : null;
}
}
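
For reference, a small sketch of how the two helpers above behave; OrcidSynchronizationServiceImpl earlier in this commit falls back to DISABLED whenever isValid(...) is false:

import org.dspace.app.profile.OrcidProfileDisconnectionMode;

public class DisconnectionModeSketch {
    public static void main(String[] args) {
        // Case-insensitive match against the enum constants.
        System.out.println(OrcidProfileDisconnectionMode.fromString("admin_and_owner")); // ADMIN_AND_OWNER
        // Unknown or null values are rejected.
        System.out.println(OrcidProfileDisconnectionMode.isValid("everyone")); // false
        System.out.println(OrcidProfileDisconnectionMode.fromString(null));    // null
    }
}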

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
* Enum that models the allowed values used to configure the ORCID synchronization
* preferences for the user's profile.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidProfileSyncPreference {
/**
* Data relating to the name, country and keywords of the ORCID profile.
*/
BIOGRAPHICAL,
/**
* Data relating to external identifiers and researcher urls of the ORCID
* profile.
*/
IDENTIFIERS;
}

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
* Enum that models the allowed values used to configure the ORCID synchronization
* mode.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public enum OrcidSynchronizationMode {
/**
* Mode in which the user can manually decide when to synchronize data with
* ORCID.
*/
MANUAL,
/**
* Mode in which synchronizations with ORCID occur through an automatic process.
*/
BATCH;
}

View File

@@ -63,6 +63,11 @@ public class ResearcherProfile {
return item;
}
public Optional<String> getOrcid() {
return getMetadataValue(item, "person.identifier.orcid")
.map(metadataValue -> metadataValue.getValue());
}
private MetadataValue getDspaceObjectOwnerMetadata(Item item) {
return getMetadataValue(item, "dspace.object.owner")
.filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null)

View File

@@ -18,14 +18,18 @@ import static org.dspace.eperson.Group.ANONYMOUS;
import java.io.IOException;
import java.net.URI;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import javax.annotation.PostConstruct;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.exception.ResourceAlreadyExistsException;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.profile.service.AfterResearcherProfileCreationAction;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
@@ -88,6 +92,21 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
@Autowired
private AuthorizeService authorizeService;
@Autowired
private OrcidSynchronizationService orcidSynchronizationService;
@Autowired(required = false)
private List<AfterResearcherProfileCreationAction> afterCreationActions;
@PostConstruct
public void postConstruct() {
if (afterCreationActions == null) {
afterCreationActions = Collections.emptyList();
}
}
@Override
public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException {
Assert.notNull(id, "An id must be provided to find a researcher profile");
@@ -113,15 +132,16 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
.orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles"));
context.turnOffAuthorisationSystem();
Item item = createProfileItem(context, ePerson, collection);
context.restoreAuthSystemState();
ResearcherProfile researcherProfile = new ResearcherProfile(item);
for (AfterResearcherProfileCreationAction afterCreationAction : afterCreationActions) {
afterCreationAction.perform(context, researcherProfile, ePerson);
}
return researcherProfile;
}
@Override
@@ -137,6 +157,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
deleteItem(context, profileItem);
} else {
removeOwnerMetadata(context, profileItem);
orcidSynchronizationService.unlinkProfile(context, profileItem);
}
}
@@ -206,7 +227,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
if (StringUtils.isBlank(profileType)) {
return false;
}
return profileType.equals(itemService.getEntityType(item));
return profileType.equals(itemService.getEntityTypeLabel(item));
}
@Override
@@ -230,7 +251,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
Iterator<Item> items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString());
while (items.hasNext()) {
Item item = items.next();
String entityType = itemService.getEntityType(item);
String entityType = itemService.getEntityTypeLabel(item);
if (profileType.equals(entityType)) {
return item;
}

View File

@@ -0,0 +1,35 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile.service;
import java.sql.SQLException;
import org.dspace.app.profile.ResearcherProfile;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Interface to mark classes that perform additional logic on a newly created
 * researcher profile.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public interface AfterResearcherProfileCreationAction {
/**
 * Perform additional actions on the given researcher profile right after it has
 * been created.
*
* @param context the DSpace context
* @param researcherProfile the created researcher profile
* @param owner the EPerson that is owner of the given profile
* @throws SQLException if a SQL error occurs
*/
void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException;
}
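A hypothetical implementation sketch (not part of this commit; the metadata field used is an assumption) showing how such an action could enrich the freshly created profile item. Because ResearcherProfileServiceImpl autowires every bean of this type, declaring the class as a Spring bean is enough for it to be invoked after each profile creation:

    import java.sql.SQLException;
    import org.dspace.app.profile.ResearcherProfile;
    import org.dspace.content.Item;
    import org.dspace.content.service.ItemService;
    import org.dspace.core.Context;
    import org.dspace.eperson.EPerson;
    import org.springframework.beans.factory.annotation.Autowired;

    public class RecordOwnerEmailAction implements AfterResearcherProfileCreationAction {

        @Autowired
        private ItemService itemService;

        @Override
        public void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException {
            // Stamp the new profile item with the owner's e-mail ("person.email" is an assumed field).
            Item item = researcherProfile.getItem();
            itemService.addMetadata(context, item, "person", "email", null, null, owner.getEmail());
        }
    }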

View File

@@ -0,0 +1,175 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.IndexingService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.SolrUtils;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to update solr items with "predb" status to either:
* - Delete them from solr if they're not present in the database
* - Remove their status if they're present in the database
*/
public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliScriptConfiguration> {
/* Log4j logger */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class);
public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex";
private IndexingService indexingService;
private SolrSearchCore solrSearchCore;
private IndexObjectFactoryFactory indexObjectServiceFactory;
private ConfigurationService configurationService;
private int timeUntilReindex = 0;
private String maxTime;
@Override
public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() {
return new DSpace().getServiceManager()
.getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class);
}
public static void runScheduled() throws Exception {
SolrDatabaseResyncCli script = new SolrDatabaseResyncCli();
script.setup();
script.internalRun();
}
@Override
public void setup() throws ParseException {
indexingService = DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName(IndexingService.class.getName(), IndexingService.class);
solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
.getServicesByType(SolrSearchCore.class).get(0);
indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance();
configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
}
@Override
public void internalRun() throws Exception {
logInfoAndOut("Starting Item resync of Solr and Database...");
timeUntilReindex = getTimeUntilReindex();
maxTime = getMaxTime();
Context context = new Context();
try {
context.turnOffAuthorisationSystem();
performStatusUpdate(context);
} finally {
context.restoreAuthSystemState();
context.complete();
}
}
private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
logDebugAndOut("Date range filter used; " + dateRangeFilter);
solrQuery.addFilterQuery(dateRangeFilter);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
if (response != null) {
logInfoAndOut(response.getResults().size() + " items found to process");
for (SolrDocument doc : response.getResults()) {
String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD);
String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID);
logDebugAndOut("Processing item with UUID: " + uuid);
Optional<IndexableObject> indexableObject = Optional.empty();
try {
indexableObject = indexObjectServiceFactory
.getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid);
} catch (SQLException e) {
log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid +
"\" from the database, removing related solr document", e);
}
try {
if (indexableObject.isPresent()) {
logDebugAndOut("Item exists in DB, updating solr document");
updateItem(context, indexableObject.get());
} else {
logDebugAndOut("Item doesn't exist in DB, removing solr document");
removeItem(context, uniqueId);
}
} catch (SQLException | IOException e) {
log.error(e.getMessage(), e);
}
}
}
indexingService.commit();
}
private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException {
Map<String,Object> fieldModifier = new HashMap<>(1);
fieldModifier.put("remove", STATUS_FIELD_PREDB);
indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier);
}
private void removeItem(Context context, String uniqueId) throws IOException, SQLException {
indexingService.unIndexContent(context, uniqueId);
}
private String getMaxTime() {
Calendar cal = Calendar.getInstance();
if (timeUntilReindex > 0) {
cal.add(Calendar.MILLISECOND, -timeUntilReindex);
}
return SolrUtils.getDateFormatter().format(cal.getTime());
}
private int getTimeUntilReindex() {
return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0);
}
private void logInfoAndOut(String message) {
log.info(message);
System.out.println(message);
}
private void logDebugAndOut(String message) {
log.debug(message);
System.out.println(message);
}
}
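A brief usage sketch (an assumption, not from this commit): the static runScheduled() hook above makes it straightforward to trigger the resync from a scheduled task, while the solr-database-resync.time-until-reindex property (milliseconds) controls how recently indexed "predb" documents are skipped:

    // Hypothetical scheduled task delegating to the hook shown above.
    public class SolrDatabaseResyncTask {
        public void run() {
            try {
                SolrDatabaseResyncCli.runScheduled();
            } catch (Exception e) {
                // runScheduled() propagates failures from setup() and internalRun()
                throw new RuntimeException("Solr/database resync failed", e);
            }
        }
    }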

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script.
*/
public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration<SolrDatabaseResyncCli> {
private Class<SolrDatabaseResyncCli> dspaceRunnableClass;
@Override
public Class<SolrDatabaseResyncCli> getDspaceRunnableClass() {
return dspaceRunnableClass;
}
@Override
public void setDspaceRunnableClass(Class<SolrDatabaseResyncCli> dspaceRunnableClass) {
this.dspaceRunnableClass = dspaceRunnableClass;
}
@Override
public boolean isAllowedToExecute(Context context) {
return true;
}
@Override
public Options getOptions() {
if (options == null) {
options = new Options();
}
return options;
}
}

View File

@@ -0,0 +1,104 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.kernel.ServiceManager;
import org.dspace.utils.DSpace;
/**
 * Implementation of {@link AuthenticationMethod} that delegates all method
* invocations to the bean of class {@link OrcidAuthenticationBean}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidAuthentication implements AuthenticationMethod {
private final ServiceManager serviceManager = new DSpace().getServiceManager();
/**
* Check if OrcidAuthentication plugin is enabled
* @return true if enabled, false otherwise
*/
public static boolean isEnabled() {
String pluginName = new OrcidAuthentication().getName();
Iterator<AuthenticationMethod> authenticationMethodIterator = AuthenticateServiceFactory.getInstance()
.getAuthenticationService().authenticationMethodIterator();
while (authenticationMethodIterator.hasNext()) {
if (pluginName.equals(authenticationMethodIterator.next().getName())) {
return true;
}
}
return false;
}
@Override
public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
return getOrcidAuthentication().canSelfRegister(context, request, username);
}
@Override
public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
getOrcidAuthentication().initEPerson(context, request, eperson);
}
@Override
public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
return getOrcidAuthentication().allowSetPassword(context, request, username);
}
@Override
public boolean isImplicit() {
return getOrcidAuthentication().isImplicit();
}
@Override
public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
return getOrcidAuthentication().getSpecialGroups(context, request);
}
@Override
public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
throws SQLException {
return getOrcidAuthentication().authenticate(context, username, password, realm, request);
}
@Override
public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {
return getOrcidAuthentication().loginPageURL(context, request, response);
}
@Override
public String getName() {
return getOrcidAuthentication().getName();
}
private OrcidAuthenticationBean getOrcidAuthentication() {
return serviceManager.getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class);
}
@Override
public boolean isUsed(Context context, HttpServletRequest request) {
return getOrcidAuthentication().isUsed(context, request);
}
}
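A small, hypothetical caller of the static helper above, shown for illustration only:

    // Only expose ORCID-specific behaviour when the plugin is actually configured.
    if (OrcidAuthentication.isEnabled()) {
        // e.g. surface the ORCID login option to the client
    }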

View File

@@ -0,0 +1,330 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import static java.lang.String.format;
import static java.net.URLEncoder.encode;
import static org.apache.commons.lang.BooleanUtils.toBoolean;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.content.Item.ANY;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.client.OrcidClient;
import org.dspace.app.orcid.client.OrcidConfiguration;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.ResearcherProfile;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.orcid.jaxb.model.v3.release.record.Email;
import org.orcid.jaxb.model.v3.release.record.Person;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* ORCID authentication for DSpace.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidAuthenticationBean implements AuthenticationMethod {
public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication";
private final static Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class);
private final static String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s";
@Autowired
private OrcidClient orcidClient;
@Autowired
private OrcidConfiguration orcidConfiguration;
@Autowired
private ConfigurationService configurationService;
@Autowired
private EPersonService ePersonService;
@Autowired
private ResearcherProfileService researcherProfileService;
@Autowired
private OrcidSynchronizationService orcidSynchronizationService;
@Autowired
private OrcidTokenService orcidTokenService;
@Override
public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
throws SQLException {
if (request == null) {
LOGGER.warn("Unable to authenticate using ORCID because the request object is null.");
return BAD_ARGS;
}
String code = (String) request.getParameter("code");
if (StringUtils.isEmpty(code)) {
LOGGER.warn("The incoming request has not code parameter");
return NO_SUCH_USER;
}
request.setAttribute(ORCID_AUTH_ATTRIBUTE, true);
return authenticateWithOrcid(context, code, request);
}
@Override
public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {
String authorizeUrl = orcidConfiguration.getAuthorizeEndpointUrl();
String clientId = orcidConfiguration.getClientId();
String redirectUri = orcidConfiguration.getRedirectUrl();
String scopes = String.join("+", orcidConfiguration.getScopes());
if (StringUtils.isAnyBlank(authorizeUrl, clientId, redirectUri, scopes)) {
LOGGER.error("Missing mandatory configuration properties for OrcidAuthentication");
return "";
}
try {
return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8"));
} catch (UnsupportedEncodingException e) {
LOGGER.error(e.getMessage(), e);
return "";
}
}
@Override
public boolean isUsed(Context context, HttpServletRequest request) {
return request.getAttribute(ORCID_AUTH_ATTRIBUTE) != null;
}
@Override
public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
return canSelfRegister();
}
@Override
public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
}
@Override
public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
return false;
}
@Override
public boolean isImplicit() {
return false;
}
@Override
public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
return Collections.emptyList();
}
@Override
public String getName() {
return "orcid";
}
private int authenticateWithOrcid(Context context, String code, HttpServletRequest request) throws SQLException {
OrcidTokenResponseDTO token = getOrcidAccessToken(code);
if (token == null) {
return NO_SUCH_USER;
}
String orcid = token.getOrcid();
EPerson ePerson = ePersonService.findByNetid(context, orcid);
if (ePerson != null) {
return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
}
Person person = getPersonFromOrcid(token);
if (person == null) {
return NO_SUCH_USER;
}
String email = getEmail(person).orElse(null);
ePerson = ePersonService.findByEmail(context, email);
if (ePerson != null) {
return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
}
return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER;
}
private int logInEPerson(Context context, OrcidTokenResponseDTO token, EPerson ePerson)
throws SQLException {
context.setCurrentUser(ePerson);
setOrcidMetadataOnEPerson(context, ePerson, token);
ResearcherProfile profile = findProfile(context, ePerson);
if (profile != null) {
orcidSynchronizationService.linkProfile(context, profile.getItem(), token);
}
return SUCCESS;
}
private ResearcherProfile findProfile(Context context, EPerson ePerson) throws SQLException {
try {
return researcherProfileService.findById(context, ePerson.getID());
} catch (AuthorizeException e) {
throw new RuntimeException(e);
}
}
private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException {
try {
context.turnOffAuthorisationSystem();
String email = getEmail(person)
.orElseThrow(() -> new IllegalStateException("The email is configured private on orcid"));
String orcid = token.getOrcid();
EPerson eperson = ePersonService.create(context);
eperson.setNetid(orcid);
eperson.setEmail(email);
Optional<String> firstName = getFirstName(person);
if (firstName.isPresent()) {
eperson.setFirstName(context, firstName.get());
}
Optional<String> lastName = getLastName(person);
if (lastName.isPresent()) {
eperson.setLastName(context, lastName.get());
}
eperson.setCanLogIn(true);
eperson.setSelfRegistered(true);
setOrcidMetadataOnEPerson(context, eperson, token);
ePersonService.update(context, eperson);
context.setCurrentUser(eperson);
context.dispatchEvents();
return SUCCESS;
} catch (Exception ex) {
LOGGER.error("An error occurs registering a new EPerson from ORCID", ex);
context.rollback();
return NO_SUCH_USER;
} finally {
context.restoreAuthSystemState();
}
}
private void setOrcidMetadataOnEPerson(Context context, EPerson person, OrcidTokenResponseDTO token)
throws SQLException {
String orcid = token.getOrcid();
String accessToken = token.getAccessToken();
String[] scopes = token.getScopeAsArray();
ePersonService.setMetadataSingleValue(context, person, "eperson", "orcid", null, null, orcid);
ePersonService.clearMetadata(context, person, "eperson", "orcid", "scope", ANY);
for (String scope : scopes) {
ePersonService.addMetadata(context, person, "eperson", "orcid", "scope", null, scope);
}
OrcidToken orcidToken = orcidTokenService.findByEPerson(context, person);
if (orcidToken == null) {
orcidTokenService.create(context, person, accessToken);
} else {
orcidToken.setAccessToken(accessToken);
}
}
private Person getPersonFromOrcid(OrcidTokenResponseDTO token) {
try {
return orcidClient.getPerson(token.getAccessToken(), token.getOrcid());
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the ORCID record with id " + token.getOrcid(), ex);
return null;
}
}
private Optional<String> getEmail(Person person) {
List<Email> emails = person.getEmails() != null ? person.getEmails().getEmails() : Collections.emptyList();
if (CollectionUtils.isEmpty(emails)) {
return Optional.empty();
}
return Optional.ofNullable(emails.get(0).getEmail());
}
private Optional<String> getFirstName(Person person) {
return Optional.ofNullable(person.getName())
.map(name -> name.getGivenNames())
.map(givenNames -> givenNames.getContent());
}
private Optional<String> getLastName(Person person) {
return Optional.ofNullable(person.getName())
.map(name -> name.getFamilyName())
.map(familyName -> familyName.getContent());
}
private boolean canSelfRegister() {
String canSelfRegister = configurationService.getProperty("authentication-orcid.can-self-register", "true");
if (isBlank(canSelfRegister)) {
return true;
}
return toBoolean(canSelfRegister);
}
private OrcidTokenResponseDTO getOrcidAccessToken(String code) {
try {
return orcidClient.getAccessToken(code);
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the ORCID access_token", ex);
return null;
}
}
public OrcidClient getOrcidClient() {
return orcidClient;
}
public void setOrcidClient(OrcidClient orcidClient) {
this.orcidClient = orcidClient;
}
}
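To make LOGIN_PAGE_URL_FORMAT concrete, a standalone sketch with made-up configuration values (the client id, scopes and redirect URL are placeholders, not real credentials); it mirrors the pattern used by loginPageURL() above:

    import static java.lang.String.format;
    import static java.net.URLEncoder.encode;

    public class OrcidLoginUrlExample {
        public static void main(String[] args) throws Exception {
            String authorizeUrl = "https://orcid.org/oauth/authorize";                 // assumed endpoint
            String clientId = "APP-XXXXXXXXXXXXXXXX";                                  // placeholder
            String scopes = String.join("+", "/authenticate", "/read-limited");        // assumed scopes
            String redirectUri = "https://dspace.example.org/server/api/authn/orcid";  // placeholder
            // Same pattern as LOGIN_PAGE_URL_FORMAT above
            String loginUrl = format("%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s",
                authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8"));
            System.out.println(loginUrl);
        }
    }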

View File

@@ -158,6 +158,11 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
}
bundle.addBitstream(bitstream);
// If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted
// (when removed from the original bundle)
if (bitstream.isDeleted()) {
bitstream.setDeleted(false);
}
bitstream.getBundles().add(bundle);

View File

@@ -621,8 +621,14 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
});
for (MetadataValue metadataValue : metadataValues) {
//Retrieve & store the place for each metadata value
if (StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX) &&
((RelationshipMetadataValue) metadataValue).isUseForPlace()) {
if (
// For virtual MDVs with useForPlace=true,
// update both the place of the metadatum and the place of the Relationship.
// E.g. for an Author relationship,
// the place should be updated using the same principle as dc.contributor.author.
StringUtils.startsWith(metadataValue.getAuthority(), Constants.VIRTUAL_AUTHORITY_PREFIX)
&& ((RelationshipMetadataValue) metadataValue).isUseForPlace()
) {
int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
metadataValue.setPlace(mvPlace);
String authority = metadataValue.getAuthority();
@@ -635,8 +641,16 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}
relationshipService.update(context, relationship);
} else if (
// Otherwise, just set the place of the metadatum
// ...unless the metadatum in question is a relation.* metadatum.
// This case is a leftover from when a Relationship is removed and copied to metadata.
// If we let its place change the order of any remaining Relationships will be affected.
// todo: this makes it so these leftover MDVs can't be reordered later on
!StringUtils.equals(
metadataValue.getMetadataField().getMetadataSchema().getName(), "relation"
)
) {
int mvPlace = getMetadataValuePlace(fieldToLastPlace, metadataValue);
metadataValue.setPlace(mvPlace);
}

View File

@@ -26,6 +26,8 @@ import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException;
@@ -40,6 +42,7 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataSchemaService;
@@ -120,6 +123,12 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true)
private RelationshipMetadataService relationshipMetadataService;
@Autowired(required = true)
private EntityTypeService entityTypeService;
@Autowired
private OrcidTokenService orcidTokenService;
protected ItemServiceImpl() {
super();
}
@@ -241,6 +250,10 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
return itemDAO.findAll(context, true, true);
}
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
return itemDAO.findAllRegularItems(context);
}
@Override
public Iterator<Item> findBySubmitter(Context context, EPerson eperson) throws SQLException {
return itemDAO.findBySubmitter(context, eperson);
@@ -724,7 +737,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
+ item.getID()));
// Remove relationships
for (Relationship relationship : relationshipService.findByItem(context, item, -1, -1, false, false)) {
relationshipService.forceDelete(context, relationship, false, false);
}
@@ -744,6 +757,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
harvestedItemService.delete(context, hi);
}
OrcidToken orcidToken = orcidTokenService.findByProfileItem(context, item);
if (orcidToken != null) {
orcidToken.setProfileItem(null);
}
//Only clear collections after we have removed everything else from the item
item.clearCollections();
item.setOwningCollection(null);
@@ -1580,8 +1598,36 @@ prevent the generation of resource policy entry values with null dspace_object a
}
@Override
public String getEntityTypeLabel(Item item) {
List<MetadataValue> mdvs = getMetadata(item, "dspace", "entity", "type", Item.ANY, false);
if (mdvs.isEmpty()) {
return null;
}
if (mdvs.size() > 1) {
log.warn(
"Item with uuid {}, handle {} has {} entity types ({}), expected 1 entity type",
item.getID(), item.getHandle(), mdvs.size(),
mdvs.stream().map(MetadataValue::getValue).collect(Collectors.toList())
);
}
String entityType = mdvs.get(0).getValue();
if (StringUtils.isBlank(entityType)) {
return null;
}
return entityType;
}
@Override
public EntityType getEntityType(Context context, Item item) throws SQLException {
String entityTypeString = getEntityTypeLabel(item);
if (StringUtils.isBlank(entityTypeString)) {
return null;
}
return entityTypeService.findByEntityType(context, entityTypeString);
}
}
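A short usage sketch (an assumed helper, not part of this change) contrasting the two methods introduced above: getEntityTypeLabel() is a cheap, possibly-null read of dspace.entity.type, while getEntityType() additionally resolves that label to an EntityType entity:

    // Hypothetical helper illustrating the intended call sites.
    void describeEntity(Context context, Item item, ItemService itemService) throws SQLException {
        String label = itemService.getEntityTypeLabel(item);              // plain String, may be null
        EntityType entityType = itemService.getEntityType(context, item); // resolved entity, may be null
        if (entityType != null) {
            // can be handed to e.g. relationshipTypeService.findByEntityType(context, entityType)
        } else if (label != null) {
            // the item declares an entity type label for which no EntityType row exists (yet)
        }
    }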

View File

@@ -89,6 +89,15 @@ public class Relationship implements ReloadableEntity<Integer> {
@Column(name = "rightward_value")
private String rightwardValue;
/**
* Whether the left and/or right side of a given relationship are the "latest".
* A side of a relationship is "latest" if the item on that side has either no other versions,
* or the item on that side is the most recent version that is relevant to the given relationship.
* This column affects what version of an item appears on search pages or the relationship listings of other items.
*/
@Column(name = "latest_version_status")
private LatestVersionStatus latestVersionStatus = LatestVersionStatus.BOTH;
/**
* Protected constructor, create object using:
* {@link org.dspace.content.service.RelationshipService#create(Context)} }
@@ -216,6 +225,39 @@ public class Relationship implements ReloadableEntity<Integer> {
this.rightwardValue = rightwardValue;
}
/**
* Getter for {@link #latestVersionStatus}.
* @return the latest version status of this relationship.
*/
public LatestVersionStatus getLatestVersionStatus() {
return latestVersionStatus;
}
/**
* Setter for {@link #latestVersionStatus}.
* @param latestVersionStatus the new latest version status for this relationship.
*/
public void setLatestVersionStatus(LatestVersionStatus latestVersionStatus) {
if (this.latestVersionStatus == latestVersionStatus) {
return; // no change or cache reset needed
}
this.latestVersionStatus = latestVersionStatus;
// on one item, relation.* fields will change
// on the other item, relation.*.latestForDiscovery will change
leftItem.setMetadataModified();
rightItem.setMetadataModified();
}
public enum LatestVersionStatus {
// NOTE: SQL migration expects BOTH to be the first constant in this enum!
BOTH, // both items in this relationship are the "latest"
LEFT_ONLY, // the left-hand item of this relationship is the "latest", but the right-hand item is not
RIGHT_ONLY // the right-hand item of this relationship is the "latest", but the left-hand item is not
// NOTE: one side of any given relationship should ALWAYS be the "latest"
}
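A minimal sketch of how this status is typically interpreted (a hypothetical helper for illustration; the real logic lives in RelationshipVersioningUtils): a relationship only surfaces as virtual metadata on one side when the other side is "latest":

    // Hypothetical re-statement of the rule, not the actual utility implementation.
    static boolean otherSideIsLatest(boolean isLeft, Relationship.LatestVersionStatus status) {
        if (isLeft) {
            // processing the left item: the other (right) item is "latest" for BOTH and RIGHT_ONLY
            return status == Relationship.LatestVersionStatus.BOTH
                || status == Relationship.LatestVersionStatus.RIGHT_ONLY;
        }
        // processing the right item: the other (left) item is "latest" for BOTH and LEFT_ONLY
        return status == Relationship.LatestVersionStatus.BOTH
            || status == Relationship.LatestVersionStatus.LEFT_ONLY;
    }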
/**
* Standard getter for the ID for this Relationship
* @return The ID of this relationship

View File

@@ -56,7 +56,9 @@ public interface RelationshipMetadataService {
* This method will retrieve the EntityType String from an item
* @param item The Item for which the entityType String will be returned
* @return A String value indicating the entityType
* @deprecated use {@link org.dspace.content.service.ItemService#getEntityTypeLabel(Item)} instead.
*/
@Deprecated
public String getEntityTypeStringFromMetadata(Item item);
}

View File

@@ -7,16 +7,24 @@
*/
package org.dspace.content;
import static org.dspace.content.RelationshipType.Tilted.LEFT;
import static org.dspace.content.RelationshipType.Tilted.RIGHT;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
@@ -33,6 +41,12 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
@Autowired(required = true)
protected RelationshipService relationshipService;
@Autowired(required = true)
protected RelationshipTypeService relationshipTypeService;
@Autowired(required = true)
protected ItemService itemService;
@Autowired(required = true)
protected VirtualMetadataPopulator virtualMetadataPopulator;
@@ -44,12 +58,25 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
Context context = new Context();
List<RelationshipMetadataValue> fullMetadataValueList = new LinkedList<>();
try {
EntityType entityType = itemService.getEntityType(context, item);
if (entityType != null) {
// NOTE: The following code will add metadata fields of type relation.*.latestForDiscovery
// (e.g. relation.isAuthorOfPublication.latestForDiscovery).
// These fields contain the UUIDs of the items that have a relationship with current item,
// from the perspective of the other item. In other words, given a relationship with this item,
// the current item should have "latest status" in order for the other item to appear in
// relation.*.latestForDiscovery fields.
fullMetadataValueList.addAll(findLatestForDiscoveryMetadataValues(context, item, entityType));
// NOTE: The following code will, among other things,
// add metadata fields of type relation.* (e.g. relation.isAuthorOfPublication).
// These fields contain the UUIDs of the items that have a relationship with current item,
// from the perspective of this item. In other words, given a relationship with this item,
// the other item should have "latest status" in order to appear in relation.* fields.
List<Relationship> relationships = relationshipService.findByItem(context, item, -1, -1, true);
for (Relationship relationship : relationships) {
fullMetadataValueList
.addAll(findRelationshipMetadataValueForItemRelationship(context, item, entityType.getLabel(),
relationship, enableVirtualMetadata));
}
@@ -60,16 +87,90 @@ public class RelationshipMetadataServiceImpl implements RelationshipMetadataServ
return fullMetadataValueList;
}
/**
* Create the list of relation.*.latestForDiscovery virtual metadata values for the given item.
* @param context the DSpace context.
* @param item the item.
* @param itemEntityType the entity type of the item.
* @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
*/
protected List<RelationshipMetadataValue> findLatestForDiscoveryMetadataValues(
Context context, Item item, EntityType itemEntityType
) throws SQLException {
final String schema = MetadataSchemaEnum.RELATION.getName();
final String qualifier = "latestForDiscovery";
List<RelationshipMetadataValue> mdvs = new LinkedList<>();
List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, itemEntityType);
for (RelationshipType relationshipType : relationshipTypes) {
// item is on left side of this relationship type
// NOTE: On the left item, we should index the uuids of the right items. If the relationship type is
// "tilted right", it means that we expect a huge amount of right items, so we don't index their uuids
// on the left item as a storage/performance improvement.
// As a consequence, when searching for related items (using discovery)
// on the pages of the right items you won't be able to find the left item.
if (relationshipType.getTilted() != RIGHT && relationshipType.getLeftType().equals(itemEntityType)) {
String element = relationshipType.getLeftwardType();
List<ItemUuidAndRelationshipId> data = relationshipService
.findByLatestItemAndRelationshipType(context, item, relationshipType, true);
mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
}
// item is on right side of this relationship type
// NOTE: On the right item, we should index the uuids of the left items. If the relationship type is
// "tilted left", it means that we expect a huge amount of left items, so we don't index their uuids
// on the right item as a storage/performance improvement.
// As a consequence, when searching for related items (using discovery)
// on the pages of the left items you won't be able to find the right item.
if (relationshipType.getTilted() != LEFT && relationshipType.getRightType().equals(itemEntityType)) {
String element = relationshipType.getRightwardType();
List<ItemUuidAndRelationshipId> data = relationshipService
.findByLatestItemAndRelationshipType(context, item, relationshipType, false);
mdvs.addAll(constructLatestForDiscoveryMetadataValues(context, schema, element, qualifier, data));
}
}
return mdvs;
}
/**
* Turn the given data into a list of relation.*.latestForDiscovery virtual metadata values.
* @param context the DSpace context.
* @param schema the schema for all metadata values.
* @param element the element for all metadata values.
* @param qualifier the qualifier for all metadata values.
 * @param data a list of POJOs, each containing an item uuid and its relationship id.
* @return a list (may be empty) of metadata values of type relation.*.latestForDiscovery.
*/
protected List<RelationshipMetadataValue> constructLatestForDiscoveryMetadataValues(
Context context, String schema, String element, String qualifier, List<ItemUuidAndRelationshipId> data
) {
String mdf = new MetadataFieldName(schema, element, qualifier).toString();
return data.stream()
.map(datum -> {
RelationshipMetadataValue mdv = constructMetadataValue(context, mdf);
if (mdv == null) {
return null;
}
mdv.setAuthority(Constants.VIRTUAL_AUTHORITY_PREFIX + datum.getRelationshipId());
mdv.setValue(datum.getItemUuid().toString());
// NOTE: place has no meaning for relation.*.latestForDiscovery metadata fields
mdv.setPlace(-1);
mdv.setUseForPlace(false);
return mdv;
})
.filter(Objects::nonNull)
.collect(Collectors.toUnmodifiableList());
}
@Override
@Deprecated
public String getEntityTypeStringFromMetadata(Item item) {
return itemService.getEntityTypeLabel(item);
}
@Override

View File

@@ -10,9 +10,11 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -20,15 +22,19 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.dao.RelationshipDAO;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.content.virtual.VirtualMetadataConfiguration;
import org.dspace.content.virtual.VirtualMetadataPopulator;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.springframework.beans.factory.annotation.Autowired;
public class RelationshipServiceImpl implements RelationshipService {
@@ -55,6 +61,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Autowired
private RelationshipMetadataService relationshipMetadataService;
@Autowired
private RelationshipVersioningUtils relationshipVersioningUtils;
@Autowired
private VirtualMetadataPopulator virtualMetadataPopulator;
@@ -76,9 +86,10 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override
public Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
) throws AuthorizeException, SQLException {
Relationship relationship = new Relationship();
relationship.setLeftItem(leftItem);
relationship.setRightItem(rightItem);
@@ -87,9 +98,21 @@ public class RelationshipServiceImpl implements RelationshipService {
relationship.setRightPlace(rightPlace);
relationship.setLeftwardValue(leftwardValue);
relationship.setRightwardValue(rightwardValue);
relationship.setLatestVersionStatus(latestVersionStatus);
return create(c, relationship);
}
@Override
public Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue
) throws AuthorizeException, SQLException {
return create(
c, leftItem, rightItem, relationshipType, leftPlace, rightPlace, leftwardValue, rightwardValue,
LatestVersionStatus.BOTH
);
}
@Override
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException {
if (isRelationshipValidToCreate(context, relationship)) {
@@ -98,7 +121,7 @@ public class RelationshipServiceImpl implements RelationshipService {
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
Relationship relationshipToReturn = relationshipDAO.create(context, relationship);
updatePlaceInRelationship(context, relationshipToReturn, null, null, true, true);
update(context, relationshipToReturn);
updateItemsInRelationship(context, relationship);
return relationshipToReturn;
@@ -113,71 +136,388 @@ public class RelationshipServiceImpl implements RelationshipService {
}
@Override
public Relationship move(
Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace
) throws SQLException, AuthorizeException {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
// Don't do anything if neither the leftPlace nor rightPlace was updated
if (newLeftPlace != null || newRightPlace != null) {
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, newLeftPlace, newRightPlace, false, false);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
} else {
throw new AuthorizeException(
"You do not have write rights on this relationship's items");
}
}
@Override
public Relationship move(
Context context, Relationship relationship, Item newLeftItem, Item newRightItem
) throws SQLException, AuthorizeException {
// If the new Item is the same as the current Item, don't move
newLeftItem = newLeftItem != relationship.getLeftItem() ? newLeftItem : null;
newRightItem = newRightItem != relationship.getRightItem() ? newRightItem : null;
// Don't do anything if neither the leftItem nor rightItem was updated
if (newLeftItem != null || newRightItem != null) {
// First move the Relationship to the back within the current Item's lists
// This ensures that we won't have any gaps once we move the Relationship to a different Item
move(
context, relationship,
newLeftItem != null ? -1 : null,
newRightItem != null ? -1 : null
);
boolean insertLeft = false;
boolean insertRight = false;
// If Item has been changed, mark the previous Item as modified to make sure we discard the old relation.*
// metadata on the next update.
// Set the Relationship's Items to the new ones, appending to the end
if (newLeftItem != null) {
relationship.getLeftItem().setMetadataModified();
relationship.setLeftItem(newLeftItem);
relationship.setLeftPlace(-1);
insertLeft = true;
}
if (newRightItem != null) {
relationship.getRightItem().setMetadataModified();
relationship.setRightItem(newRightItem);
relationship.setRightPlace(-1);
insertRight = true;
}
// This order of execution should be handled in the creation (create, updateplace, update relationship)
// for a proper place allocation
updatePlaceInRelationship(context, relationship, null, null, insertLeft, insertRight);
update(context, relationship);
updateItemsInRelationship(context, relationship);
}
return relationship;
}
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship.
*
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated.
* @param newLeftPlace If the Relationship in question is to be moved, the leftPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param newRightPlace If the Relationship in question is to be moved, the rightPlace it is to be moved to.
* Set this to null if the Relationship has not been moved, i.e. it has just been created,
* deleted or when its Items have been modified.
* @param insertLeft Whether the Relationship in question should be inserted into the left Item.
* Should be set to true when creating or moving to a different Item.
* @param insertRight Whether the Relationship in question should be inserted into the right Item.
* Should be set to true when creating or moving to a different Item.
* @throws SQLException If something goes wrong
* @throws AuthorizeException
* If the user is not authorized to update the Relationship or its Items
*/
private void updatePlaceInRelationship(
Context context, Relationship relationship,
Integer newLeftPlace, Integer newRightPlace, boolean insertLeft, boolean insertRight
) throws SQLException, AuthorizeException {
Item leftItem = relationship.getLeftItem();
// Max value is used to ensure that these will get added to the back of the list and thus receive the highest
// (last) place, since the place is set to -1 on creation
if (relationship.getLeftPlace() == -1) {
relationship.setLeftPlace(Integer.MAX_VALUE);
}
Item rightItem = relationship.getRightItem();
if (relationship.getRightPlace() == -1) {
relationship.setRightPlace(Integer.MAX_VALUE);
}
List<Relationship> leftRelationships = findByItemAndRelationshipType(context,
leftItem,
relationship.getRelationshipType(), true);
List<Relationship> rightRelationships = findByItemAndRelationshipType(context,
rightItem,
relationship.getRelationshipType(),
false);
// These relationships are only deleted from the temporary lists incase they're present in them so that we can
// These list also include the non-latest. This is relevant to determine whether it's deleted.
// This can also imply there may be overlapping places, and/or the given relationship will overlap
// But the shift will allow this, and only happen when needed based on the latest status
List<Relationship> leftRelationships = findByItemAndRelationshipType(
context, leftItem, relationship.getRelationshipType(), true, -1, -1, false
);
List<Relationship> rightRelationships = findByItemAndRelationshipType(
context, rightItem, relationship.getRelationshipType(), false, -1, -1, false
);
// These relationships are only deleted from the temporary lists in case they're present in them so that we can
// properly perform our place calculation later down the line in this method.
if (leftRelationships.contains(relationship)) {
leftRelationships.remove(relationship);
}
if (rightRelationships.contains(relationship)) {
rightRelationships.remove(relationship);
}
boolean deletedFromLeft = !leftRelationships.contains(relationship);
boolean deletedFromRight = !rightRelationships.contains(relationship);
leftRelationships.remove(relationship);
rightRelationships.remove(relationship);
List<MetadataValue> leftMetadata = getSiblingMetadata(leftItem, relationship, true);
List<MetadataValue> rightMetadata = getSiblingMetadata(rightItem, relationship, false);
// For new relationships added to the end, this will be -1.
// For new relationships added at a specific position, this will contain that position.
// For existing relationships, this will contain the place before it was moved.
// For deleted relationships, this will contain the place before it was deleted.
int oldLeftPlace = relationship.getLeftPlace();
int oldRightPlace = relationship.getRightPlace();
boolean movedUpLeft = resolveRelationshipPlace(
relationship, true, leftRelationships, leftMetadata, oldLeftPlace, newLeftPlace
);
boolean movedUpRight = resolveRelationshipPlace(
relationship, false, rightRelationships, rightMetadata, oldRightPlace, newRightPlace
);
context.turnOffAuthorisationSystem();
//only shift if the place is relevant for the latest relationships
if (relationshipVersioningUtils.otherSideIsLatest(true, relationship.getLatestVersionStatus())) {
shiftSiblings(
relationship, true, oldLeftPlace, movedUpLeft, insertLeft, deletedFromLeft,
leftRelationships, leftMetadata
);
}
if (relationshipVersioningUtils.otherSideIsLatest(false, relationship.getLatestVersionStatus())) {
shiftSiblings(
relationship, false, oldRightPlace, movedUpRight, insertRight, deletedFromRight,
rightRelationships, rightMetadata
);
}
updateItem(context, leftItem);
updateItem(context, rightItem);
context.restoreAuthSystemState();
}
/**
* Return the MDVs in the Item's MDF corresponding to the given Relationship.
* Return an empty list if the Relationship isn't mapped to any MDF
* or if the mapping is configured with useForPlace=false.
*
 * This returns actual metadata (not virtual) which is in the same metadata field as the useForPlace mapping.
* For a publication with 2 author relationships and 3 plain text dc.contributor.author values,
* it would return the 3 plain text dc.contributor.author values.
* For a person related to publications, it would return an empty list.
*/
private List<MetadataValue> getSiblingMetadata(
Item item, Relationship relationship, boolean isLeft
) {
List<MetadataValue> metadata = new ArrayList<>();
if (virtualMetadataPopulator.isUseForPlaceTrueForRelationshipType(relationship.getRelationshipType(), isLeft)) {
HashMap<String, VirtualMetadataConfiguration> mapping;
if (isLeft) {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getLeftwardType());
} else {
mapping = virtualMetadataPopulator.getMap().get(relationship.getRelationshipType().getRightwardType());
}
if (mapping != null) {
for (String mdf : mapping.keySet()) {
metadata.addAll(
// Make sure we're only looking at database MDVs; if the relationship currently overlaps
// one of these, its virtual MDV will overwrite the database MDV in itemService.getMetadata()
// The relationship pass should be sufficient to move any sibling virtual MDVs.
item.getMetadata()
.stream()
.filter(mdv -> mdv.getMetadataField().toString().equals(mdf.replace(".", "_")))
.collect(Collectors.toList())
);
}
}
}
return metadata;
}
/**
* Set the left/right place of a Relationship
* - To a new place in case it's being moved
* - Resolve -1 to the actual last place based on the places of its sibling Relationships and/or MDVs
* and determine if it has been moved up in the list.
*
* Examples:
* - Insert a Relationship at place 3
* newPlace starts out as null and is not updated. Return movedUp=false
* - Insert a Relationship at place -1
* newPlace starts out as null and is resolved to e.g. 6. Update the Relationship and return movedUp=false
* - Move a Relationship from place 4 to 2
* Update the Relationship and return movedUp=false.
* - Move a Relationship from place 2 to -1
* newPlace starts out as -1 and is resolved to e.g. 5. Update the relationship and return movedUp=true.
* - Remove a relationship from place 1
* Return movedUp=false
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place a Relationship has been inserted.
* @param newPlace The new place for this Relationship. Will be null on insert/delete.
* @return true if the Relationship was moved and newPlace > oldPlace
*/
private boolean resolveRelationshipPlace(
Relationship relationship, boolean isLeft,
List<Relationship> relationships, List<MetadataValue> metadata,
int oldPlace, Integer newPlace
) {
boolean movedUp = false;
if (newPlace != null) {
// We're moving an existing Relationship...
if (newPlace == -1) {
// ...to the end of the list
int nextPlace = getNextPlace(relationships, metadata, isLeft);
if (nextPlace == oldPlace) {
// If this Relationship is already at the end, do nothing.
newPlace = oldPlace;
} else {
// Subtract 1 from the next place since we're moving, not inserting and
// the total number of Relationships stays the same.
newPlace = nextPlace - 1;
}
}
if (newPlace > oldPlace) {
// ...up the list. We have to keep track of this in order to shift correctly later on
movedUp = true;
}
} else if (oldPlace == -1) {
// We're _not_ moving an existing Relationship. The newPlace is already set in the Relationship object.
// We only need to resolve it to the end of the list if it's set to -1, otherwise we can just keep it as is.
newPlace = getNextPlace(relationships, metadata, isLeft);
}
if (newPlace != null) {
setPlace(relationship, isLeft, newPlace);
}
return movedUp;
}
/**
* Return the index of the next place in a list of Relationships and Metadata.
* By not relying on the size of both lists we can support one-to-many virtual MDV mappings.
* @param isLeft whether to take the left or right place of each Relationship
*/
private int getNextPlace(List<Relationship> relationships, List<MetadataValue> metadata, boolean isLeft) {
return Stream.concat(
metadata.stream().map(MetadataValue::getPlace),
relationships.stream().map(r -> getPlace(r, isLeft))
).max(Integer::compare)
.map(integer -> integer + 1)
.orElse(0);
}
/**
* Adjust the left/right place of sibling Relationships and MDVs
*
* Examples: with sibling Relationships R,S,T and metadata a,b,c
* - Insert T at place 1 aRbSc -> a T RbSc
* Shift all siblings with place >= 1 one place to the right
* - Delete R from place 2 aT R bSc -> aTbSc
* Shift all siblings with place > 2 one place to the left
* - Move S from place 3 to place 2 (movedUp=false) aTb S c -> aT S bc
* Shift all siblings with 2 < place <= 3 one place to the right
* - Move T from place 1 to place 3 (movedUp=true) a T Sbc -> aSb T c
* Shift all siblings with 1 < place <= 3 one place to the left
*
* @param relationship the Relationship that's being updated
* @param isLeft whether to consider the left side of the Relationship.
* This method should be called twice, once with isLeft=true and once with isLeft=false.
* Make sure this matches the provided relationships/metadata/oldPlace/newPlace.
* @param oldPlace the previous place for this Relationship, in case it has been moved.
* Otherwise, the current place of a deleted Relationship
* or the place a Relationship has been inserted.
* @param movedUp if this Relationship has been moved up the list, e.g. from place 2 to place 4
* @param inserted whether this Relationship has been inserted
* @param deleted whether this Relationship has been deleted
* @param relationships the list of sibling Relationships
* @param metadata the list of sibling MDVs
*/
private void shiftSiblings(
Relationship relationship, boolean isLeft, int oldPlace, boolean movedUp, boolean inserted, boolean deleted,
List<Relationship> relationships, List<MetadataValue> metadata
) {
int newPlace = getPlace(relationship, isLeft);
for (Relationship sibling : relationships) {
// NOTE: If and only if the other side of the relationship has "latest" status, the relationship will appear
// as a metadata value on the item at the current side (indicated by isLeft) of the relationship.
//
// Example: volume <----> issue (LEFT_ONLY)
// => LEFT_ONLY means that the volume has "latest" status, but the issue does NOT have "latest" status
// => the volume will appear in the metadata of the issue,
// but the issue will NOT appear in the metadata of the volume
//
// This means that the other side of the relationship has to have "latest" status, otherwise this
// relationship is NOT relevant for place calculation.
if (relationshipVersioningUtils.otherSideIsLatest(isLeft, sibling.getLatestVersionStatus())) {
int siblingPlace = getPlace(sibling, isLeft);
if (
(deleted && siblingPlace > newPlace)
// If the relationship was deleted, all relationships after it should shift left
// We must make the distinction between deletes and moves because for deletes oldPlace == newPlace
|| (movedUp && siblingPlace <= newPlace && siblingPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all relationships
// with place > 2 (the old place) and <= to 5 should shift left
) {
setPlace(sibling, isLeft, siblingPlace - 1);
} else if (
(inserted && siblingPlace >= newPlace)
// If the relationship was inserted, all relationships starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && siblingPlace >= newPlace && siblingPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all relationships
// with place >= 2 and < 5 (the old place) should shift right
) {
setPlace(sibling, isLeft, siblingPlace + 1);
}
}
}
for (MetadataValue mdv : metadata) {
// NOTE: Plain text metadata values should ALWAYS be included in the place calculation,
// because they are by definition only visible/relevant to the side of the relationship
// that we are currently processing.
int mdvPlace = mdv.getPlace();
if (
(deleted && mdvPlace > newPlace)
// If the relationship was deleted, all metadata after it should shift left
// We must make the distinction between deletes and moves because for deletes oldPlace == newPlace
// If the relationship was copied to metadata on deletion:
// - the plain text can be after the relationship (in which case it's moved forward again)
// - or before the relationship (in which case it remains in place)
|| (movedUp && mdvPlace <= newPlace && mdvPlace > oldPlace)
// If the relationship was moved up e.g. from place 2 to 5, all metadata
// with place > 2 (the old place) and <= to 5 should shift left
) {
mdv.setPlace(mdvPlace - 1);
} else if (
(inserted && mdvPlace >= newPlace)
// If the relationship was inserted, all metadata starting from that place should shift right
// We must make the distinction between inserts and moves because for inserts oldPlace == newPlace
|| (!movedUp && mdvPlace >= newPlace && mdvPlace < oldPlace)
// If the relationship was moved down e.g. from place 5 to 2, all metadata
// with place >= 2 and < 5 (the old place) should shift right
) {
mdv.setPlace(mdvPlace + 1);
}
}
}
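/*
 * Minimal standalone sketch (an illustration only, NOT part of this class; plain int places are used
 * instead of Relationship/MetadataValue objects to keep it self-contained): the two basic shift rules
 * applied above, for a delete and for an insert.
 */
class ShiftSiblingsSketch {
    // after deleting the sibling at 'deletedPlace', everything behind it shifts one place to the left
    static int[] shiftAfterDelete(int[] places, int deletedPlace) {
        return java.util.Arrays.stream(places).map(p -> p > deletedPlace ? p - 1 : p).toArray();
    }

    // after inserting a new sibling at 'insertedPlace', everything from that place on shifts to the right
    static int[] shiftAfterInsert(int[] places, int insertedPlace) {
        return java.util.Arrays.stream(places).map(p -> p >= insertedPlace ? p + 1 : p).toArray();
    }

    public static void main(String[] args) {
        // siblings at places 0,1,3,4 after the element at place 2 was removed -> [0, 1, 2, 3]
        System.out.println(java.util.Arrays.toString(shiftAfterDelete(new int[] {0, 1, 3, 4}, 2)));
        // siblings at places 0,1,2,3 before an insert at place 1 -> [0, 2, 3, 4]
        System.out.println(java.util.Arrays.toString(shiftAfterInsert(new int[] {0, 1, 2, 3}, 1)));
    }
}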
private int getPlace(Relationship relationship, boolean isLeft) {
if (isLeft) {
return relationship.getLeftPlace();
} else {
return relationship.getRightPlace();
}
}
private void setPlace(Relationship relationship, boolean isLeft, int place) {
if (isLeft) {
relationship.setLeftPlace(place);
} else {
relationship.setRightPlace(place);
}
}
@Override
@@ -187,16 +527,6 @@ public class RelationshipServiceImpl implements RelationshipService {
itemService.update(context, relatedItem);
}
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextLeftPlaceByLeftItem(context, item);
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
return relationshipDAO.findNextRightPlaceByRightItem(context, item);
}
private boolean isRelationshipValidToCreate(Context context, Relationship relationship) throws SQLException {
RelationshipType relationshipType = relationship.getRelationshipType();
@@ -212,15 +542,19 @@ public class RelationshipServiceImpl implements RelationshipService {
logRelationshipTypeDetailsForError(relationshipType);
return false;
}
if (!verifyMaxCardinality(context, relationship.getLeftItem(),
if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.LEFT_ONLY)
&& !verifyMaxCardinality(context, relationship.getLeftItem(),
relationshipType.getLeftMaxCardinality(), relationshipType, true)) {
//If LEFT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the left item has more" +
" relationships than the left max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType);
return false;
}
if (!verifyMaxCardinality(context, relationship.getRightItem(),
if (!relationship.getLatestVersionStatus().equals(LatestVersionStatus.RIGHT_ONLY)
&& !verifyMaxCardinality(context, relationship.getRightItem(),
relationshipType.getRightMaxCardinality(), relationshipType, false)) {
//If RIGHT_ONLY => it's a copied relationship, and the count can be ignored
log.warn("The relationship has been deemed invalid since the right item has more" +
" relationships than the right max cardinality allows after we'd store this relationship");
logRelationshipTypeDetailsForError(relationshipType);
@@ -279,14 +613,22 @@ public class RelationshipServiceImpl implements RelationshipService {
}
@Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset,
boolean excludeTilted) throws SQLException {
public List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted
) throws SQLException {
return findByItem(context, item, limit, offset, excludeTilted, true);
}
List<Relationship> list = relationshipDAO.findByItem(context, item, limit, offset, excludeTilted);
@Override
public List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
List<Relationship> list =
relationshipDAO.findByItem(context, item, limit, offset, excludeTilted, excludeNonLatest);
list.sort((o1, o2) -> {
int relationshipType = o1.getRelationshipType().getLeftwardType()
.compareTo(o2.getRelationshipType().getLeftwardType());
.compareTo(o2.getRelationshipType().getLeftwardType());
if (relationshipType != 0) {
return relationshipType;
} else {
@@ -377,7 +719,7 @@ public class RelationshipServiceImpl implements RelationshipService {
if (authorizeService.authorizeActionBoolean(context, relationship.getLeftItem(), Constants.WRITE) ||
authorizeService.authorizeActionBoolean(context, relationship.getRightItem(), Constants.WRITE)) {
relationshipDAO.delete(context, relationship);
updatePlaceInRelationship(context, relationship);
updatePlaceInRelationship(context, relationship, null, null, false, false);
updateItemsInRelationship(context, relationship);
} else {
throw new AuthorizeException(
@@ -450,7 +792,7 @@ public class RelationshipServiceImpl implements RelationshipService {
+ item.getID() + " due to " + currentDepth + " depth");
return;
}
String entityTypeStringFromMetadata = relationshipMetadataService.getEntityTypeStringFromMetadata(item);
String entityTypeStringFromMetadata = itemService.getEntityTypeLabel(item);
EntityType actualEntityType = entityTypeService.findByEntityType(context, entityTypeStringFromMetadata);
// Get all types of relations for the current item
List<RelationshipType> relationshipTypes = relationshipTypeService.findByEntityType(context, actualEntityType);
@@ -510,6 +852,9 @@ public class RelationshipServiceImpl implements RelationshipService {
/**
* Converts virtual metadata from RelationshipMetadataValue objects to actual item metadata.
* The resulting MDVs are added in front or behind the Relationship's virtual MDVs.
* The Relationship's virtual MDVs may be shifted right, and all subsequent metadata will be shifted right.
* So this method ensures the places are still valid.
*
* @param context The relevant DSpace context
* @param relationship The relationship containing the left and right items
@@ -520,13 +865,20 @@ public class RelationshipServiceImpl implements RelationshipService {
boolean copyToRightItem)
throws SQLException, AuthorizeException {
if (copyToLeftItem) {
String entityTypeString = relationshipMetadataService
.getEntityTypeStringFromMetadata(relationship.getLeftItem());
String entityTypeString = itemService.getEntityTypeLabel(relationship.getLeftItem());
List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getLeftItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getLeftItem(),
// This adds the plain text metadata values on the same spot as the virtual values.
// This will be overruled in org.dspace.content.DSpaceObjectServiceImpl.update
// in the line below but it's not important whether the plain text or virtual values end up on top.
// The virtual values will eventually be deleted, and the others shifted
// This is required because addAndShiftRightMetadata has issues on metadata fields containing
// relationship values which are not useForPlace, while the relationship type has useForPlace
// E.g. when using addAndShiftRightMetadata on relation.isAuthorOfPublication, it will break the order
// from dc.contributor.author
itemService.addMetadata(context, relationship.getLeftItem(),
relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(),
@@ -535,16 +887,16 @@ public class RelationshipServiceImpl implements RelationshipService {
relationshipMetadataValue.getValue(), null, -1,
relationshipMetadataValue.getPlace());
}
//This will ensure the new values no longer overlap, but won't break the order
itemService.update(context, relationship.getLeftItem());
}
if (copyToRightItem) {
String entityTypeString = relationshipMetadataService
.getEntityTypeStringFromMetadata(relationship.getRightItem());
String entityTypeString = itemService.getEntityTypeLabel(relationship.getRightItem());
List<RelationshipMetadataValue> relationshipMetadataValues =
relationshipMetadataService.findRelationshipMetadataValueForItemRelationship(context,
relationship.getRightItem(), entityTypeString, relationship, true);
for (RelationshipMetadataValue relationshipMetadataValue : relationshipMetadataValues) {
itemService.addAndShiftRightMetadata(context, relationship.getRightItem(),
itemService.addMetadata(context, relationship.getRightItem(),
relationshipMetadataValue.getMetadataField().
getMetadataSchema().getName(),
relationshipMetadataValue.getMetadataField().getElement(),
@@ -638,22 +990,46 @@ public class RelationshipServiceImpl implements RelationshipService {
public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType)
throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, -1, -1);
return findByItemAndRelationshipType(context, item, relationshipType, -1, -1, true);
}
@Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, int limit, int offset)
throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, limit, offset);
return findByItemAndRelationshipType(context, item, relationshipType, limit, offset, true);
}
@Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, boolean isLeft,
int limit, int offset)
throws SQLException {
return relationshipDAO.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset);
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.findByItemAndRelationshipType(context, item, relationshipType, limit, offset, excludeNonLatest);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset
) throws SQLException {
return findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, true);
}
@Override
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.findByItemAndRelationshipType(context, item, relationshipType, isLeft, limit, offset, excludeNonLatest);
}
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
return relationshipDAO
.findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft);
}
@Override
@@ -690,7 +1066,14 @@ public class RelationshipServiceImpl implements RelationshipService {
@Override
public int countByItem(Context context, Item item) throws SQLException {
return relationshipDAO.countByItem(context, item);
return countByItem(context, item, false, true);
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO.countByItem(context, item, excludeTilted, excludeNonLatest);
}
@Override
@@ -699,9 +1082,18 @@ public class RelationshipServiceImpl implements RelationshipService {
}
@Override
public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType,
boolean isLeft) throws SQLException {
return relationshipDAO.countByItemAndRelationshipType(context, item, relationshipType, isLeft);
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
return countByItemAndRelationshipType(context, item, relationshipType, isLeft, true);
}
@Override
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
return relationshipDAO
.countByItemAndRelationshipType(context, item, relationshipType, isLeft, excludeNonLatest);
}
@Override

View File

@@ -128,19 +128,23 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Optional<MetadataValue> colEntityType = getDSpaceEntityType(collection);
Optional<MetadataValue> templateItemEntityType = getDSpaceEntityType(templateItem);
if (colEntityType.isPresent() && templateItemEntityType.isPresent() &&
if (template && colEntityType.isPresent() && templateItemEntityType.isPresent() &&
!StringUtils.equals(colEntityType.get().getValue(), templateItemEntityType.get().getValue())) {
throw new IllegalStateException("The template item has entity type : (" +
templateItemEntityType.get().getValue() + ") different than collection entity type : " +
colEntityType.get().getValue());
}
if (colEntityType.isPresent() && templateItemEntityType.isEmpty()) {
if (template && colEntityType.isPresent() && templateItemEntityType.isEmpty()) {
MetadataValue original = colEntityType.get();
MetadataField metadataField = original.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), original.getLanguage(), original.getValue());
// NOTE: dspace.entity.type = <blank> does not make sense
// the collection entity type is by default blank when a collection is first created
if (StringUtils.isNotBlank(original.getValue())) {
itemService.addMetadata(context, item, metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), original.getLanguage(), original.getValue());
}
}
if (template && (templateItem != null)) {

View File

@@ -32,8 +32,22 @@ public interface ItemDAO extends DSpaceObjectLegacySupportDAO<Item> {
public Iterator<Item> findAll(Context context, boolean archived, int limit, int offset) throws SQLException;
@Deprecated
public Iterator<Item> findAll(Context context, boolean archived, boolean withdrawn) throws SQLException;
/**
* Find all items that are:
* - NOT in the workspace
* - NOT in the workflow
* - NOT a template item for e.g. a collection
*
* This implies that the result also contains older versions of items and withdrawn items.
* @param context the DSpace context.
* @return iterator over all regular items.
* @throws SQLException if database error.
*/
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/**
* Find all Items modified since a Date.
*

View File

@@ -14,6 +14,7 @@ import java.util.UUID;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
@@ -28,53 +29,38 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
/**
* This method returns a list of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted If true, excludes tilted relationships
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException;
List<Relationship> findByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns a list of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param limit paging limit
* @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param limit paging limit
* @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted)
throws SQLException;
/**
* This method returns the next leftplace integer to use for a relationship with this item as the leftItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on leftItem
* @return The next integer to be used for the leftplace of a relationship with the given item
* as a left item
* @throws SQLException If something goes wrong
*/
int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException;
/**
* This method returns the next rightplace integer to use for a relationship with this item as the rightItem
*
* @param context The relevant DSpace context
* @param item The item to be matched on rightItem
* @return The next integer to be used for the rightplace of a relationship with the given item
* as a right item
* @throws SQLException If something goes wrong
*/
int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException;
List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
/**
* This method returns a list of Relationship objects for the given RelationshipType object.
@@ -108,34 +94,69 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
* It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property
* @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on
* @param limit paging limit
* @param offset paging offset
* @param item item to filter by
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType,
Integer limit, Integer offset) throws SQLException;
List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException;
/**
* This method returns a list of Relationship objects for the given RelationshipType object.
* It will construct a list of all Relationship objects that have the given RelationshipType object
* as the relationshipType property
* @param context The relevant DSpace context
* @param item item to filter by
* @param relationshipType The RelationshipType object to be checked on
* @param isLeft Is item left or right
* @param limit paging limit
* @param offset paging offset
* @param item item to filter by
* @param isLeft Is item left or right
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return A list of Relationship objects that have the given RelationshipType object as the
* relationshipType property
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType,
boolean isLeft, Integer limit, Integer offset)
throws SQLException;
List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
* of the other item. In other words, given a relationship with the given item, the given item should have
* "latest status" in order for the other item uuid to be returned.
*
* This method differs from the "excludeNonLatest" property in other methods,
* because in this method the current item should have "latest status" to return the other item,
* whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
*
* This method is used to index items in solr; when searching for related items of one of the returned uuids,
* the given item should appear as a search result.
*
* NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
* the items on both sides, which is unnecessary.
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
* @param context the DSpace context.
* @param latestItem the target item; only relationships where this item has "latest status" should be considered.
* @param relationshipType the relationship type for which relationships should be selected.
* @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
* This is redundant in most cases, but necessary because relationship types may have
* the same entity type on both sides.
* @return a list containing pairs of relationship ids and item uuids.
* @throws SQLException if something goes wrong.
*/
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
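/*
 * Hedged usage sketch (the helper class and method below are assumptions for illustration, not
 * DSpace code): collect the UUIDs of the items on the other side of the given item's relationships,
 * so that a search for any of those items can also surface this item, as described above. Only the
 * DAO method documented here is real API.
 */
class RelatedItemUuidSketch {
    private final org.dspace.content.dao.RelationshipDAO relationshipDAO;

    RelatedItemUuidSketch(org.dspace.content.dao.RelationshipDAO relationshipDAO) {
        this.relationshipDAO = relationshipDAO;
    }

    java.util.Set<java.util.UUID> relatedItemUuids(
        org.dspace.core.Context context, org.dspace.content.Item latestItem,
        org.dspace.content.RelationshipType relationshipType, boolean isLeft
    ) throws java.sql.SQLException {
        java.util.Set<java.util.UUID> uuids = new java.util.HashSet<>();
        // only relationships where 'latestItem' has "latest status" are returned
        for (org.dspace.content.dao.pojo.ItemUuidAndRelationshipId pair
                : relationshipDAO.findByLatestItemAndRelationshipType(context, latestItem, relationshipType, isLeft)) {
            uuids.add(pair.getItemUuid());
        }
        return uuids;
    }
}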
/**
* This method returns a list of Relationship objects for the given typeName
@@ -183,28 +204,34 @@ public interface RelationshipDAO extends GenericDAO<Relationship> {
/**
* This method returns a count of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The list of Relationship objects that contain either a left or a
* right item that is equal to the given item
* @throws SQLException If something goes wrong
*/
int countByItem(Context context, Item item) throws SQLException;
int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
/**
* Count total number of relationships (rows in relationship table) by an item and a relationship type and a boolean
* indicating whether the item should be the leftItem or the rightItem
*
* @param context context
* @param relationshipType relationship type to filter by
* @param item item to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not
* @param context context
* @param relationshipType relationship type to filter by
* @param item item to filter by
* @param isLeft indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return total count
* @throws SQLException if database error
*/
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft)
throws SQLException;
int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
/**
* Count total number of relationships (rows in relationship table) given a typeName

View File

@@ -79,6 +79,20 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
return iterate(query);
}
@Override
public Iterator<Item> findAllRegularItems(Context context) throws SQLException {
// NOTE: This query includes archived items, withdrawn items and older versions of items.
// It does not include workspace, workflow or template items.
Query query = createQuery(
context,
"SELECT i FROM Item as i " +
"LEFT JOIN Version as v ON i = v.item " +
"WHERE i.inArchive=true or i.withdrawn=true or (i.inArchive=false and v.id IS NOT NULL) " +
"ORDER BY i.id"
);
return iterate(query);
}
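/*
 * Minimal standalone sketch (an illustration only, not part of this DAO) of the selection rule
 * encoded in the HQL above: an item is "regular" when it is archived, withdrawn, or not archived
 * but referenced by a Version row (i.e. an older version of a versioned item).
 */
class RegularItemRuleSketch {
    static boolean isRegular(boolean inArchive, boolean withdrawn, boolean hasVersionRow) {
        return inArchive || withdrawn || (!inArchive && hasVersionRow);
    }

    public static void main(String[] args) {
        System.out.println(isRegular(true, false, false));   // archived item -> true
        System.out.println(isRegular(false, false, true));   // older version of an item -> true
        System.out.println(isRegular(false, false, false));  // workspace/workflow/template item -> false
    }
}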
@Override
public Iterator<Item> findAll(Context context, boolean archived,
boolean withdrawn, boolean discoverable, Date lastModified)

View File

@@ -11,17 +11,22 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.persistence.Query;
import javax.persistence.Tuple;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import org.dspace.content.Item;
import org.dspace.content.Item_;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.RelationshipType_;
import org.dspace.content.Relationship_;
import org.dspace.content.dao.RelationshipDAO;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.AbstractHibernateDAO;
@@ -30,95 +35,152 @@ import org.dspace.core.Context;
public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> implements RelationshipDAO {
@Override
public List<Relationship> findByItem(Context context, Item item, boolean excludeTilted) throws SQLException {
return findByItem(context, item, -1, -1, excludeTilted);
public List<Relationship> findByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
return findByItem(context, item, -1, -1, excludeTilted, excludeNonLatest);
}
@Override
public List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset,
boolean excludeTilted) throws SQLException {
public List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
CriteriaQuery<Relationship> criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
if (excludeTilted) {
// If this item is the left item,
// return relationships for types which are not tilted right (tilted is either left nor null)
// If this item is the right item,
// return relationships for types which are not tilted left (tilted is either right nor null)
criteriaQuery
.where(criteriaBuilder.or(
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted)),
criteriaBuilder.notEqual(relationshipRoot
.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted), RelationshipType.Tilted.RIGHT))),
criteriaBuilder.and(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item),
criteriaBuilder.or(
criteriaBuilder.isNull(relationshipRoot.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted)),
criteriaBuilder.notEqual(relationshipRoot
.get(Relationship_.relationshipType)
.get(RelationshipType_.tilted), RelationshipType.Tilted.LEFT)))));
} else {
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)));
}
criteriaQuery.where(
criteriaBuilder.or(
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
)
);
return list(context, criteriaQuery, false, Relationship.class, limit, offset);
}
@Override
public int countByItem(Context context, Item item)
throws SQLException {
/**
* Get the predicate for a criteria query that selects relationships by their left item.
* @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getLeftItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item)
);
if (excludeTilted) {
// if this item is the left item,
// return relationships for types which are NOT tilted right (tilted is either left or null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.RIGHT
)
)
);
}
if (excludeNonLatest) {
// if this item is the left item,
// return relationships for which the right item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
/**
* Get the predicate for a criteria query that selects relationships by their right item.
* @param criteriaBuilder the criteria builder.
* @param relationshipRoot the relationship root.
* @param item the item that is being searched for.
* @param excludeTilted if true, exclude tilted relationships.
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant.
* @return a predicate that satisfies the given restrictions.
*/
protected Predicate getRightItemPredicate(
CriteriaBuilder criteriaBuilder, Root<Relationship> relationshipRoot, Item item,
boolean excludeTilted, boolean excludeNonLatest
) {
List<Predicate> predicates = new ArrayList<>();
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)
);
if (excludeTilted) {
// if this item is the right item,
// return relationships for types which are NOT tilted left (tilted is either right or null)
predicates.add(
criteriaBuilder.or(
criteriaBuilder.isNull(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted)
),
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.relationshipType).get(RelationshipType_.tilted),
RelationshipType.Tilted.LEFT
)
)
);
}
if (excludeNonLatest) {
// if this item is the right item,
// return relationships for which the left item is the "latest" version that is relevant.
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
}
return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
}
@Override
public int countByItem(
Context context, Item item, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.or(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)));
criteriaQuery.where(
criteriaBuilder.or(
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, excludeTilted, excludeNonLatest)
)
);
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
}
@Override
public int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getLeftPlace() - o1.getLeftPlace());
if (!list.isEmpty()) {
return list.get(0).getLeftPlace() + 1;
} else {
return 0;
}
}
@Override
public int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item));
List<Relationship> list = list(context, criteriaQuery, false, Relationship.class, -1, -1);
list.sort((o1, o2) -> o2.getRightPlace() - o1.getRightPlace());
if (!list.isEmpty()) {
return list.get(0).getRightPlace() + 1;
} else {
return 0;
}
}
@Override
public List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType)
throws SQLException {
@@ -140,49 +202,132 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
}
@Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, Integer limit,
Integer offset)
throws SQLException {
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType),
relationshipType), criteriaBuilder.or
(criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item)));
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
criteriaBuilder.or(
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
)
);
return list(context, criteriaQuery, true, Relationship.class, limit, offset);
}
@Override
public List<Relationship> findByItemAndRelationshipType(Context context, Item item,
RelationshipType relationshipType, boolean isLeft,
Integer limit, Integer offset)
throws SQLException {
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, Integer limit, Integer offset,
boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
if (isLeft) {
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType),
relationshipType),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item));
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.leftPlace)));
} else {
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType),
relationshipType),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item));
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
criteriaQuery.orderBy(criteriaBuilder.asc(relationshipRoot.get(Relationship_.rightPlace)));
}
return list(context, criteriaQuery, true, Relationship.class, limit, offset);
}
@Override
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException {
final String relationshipIdAlias = "relationshipId";
final String itemUuidAlias = "itemUuid";
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
ArrayList<Predicate> predicates = new ArrayList<>();
// all relationships should have the specified relationship type
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType)
);
if (isLeft) {
// match relationships based on the left item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), latestItem)
);
// the left item has to have "latest status" => accept BOTH and LEFT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.RIGHT_ONLY
)
);
// return the UUIDs of the right item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.rightItem).get(Item_.id).alias(itemUuidAlias)
);
} else {
// match relationships based on the right item
predicates.add(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), latestItem)
);
// the right item has to have "latest status" => accept BOTH and RIGHT_ONLY
predicates.add(
criteriaBuilder.notEqual(
relationshipRoot.get(Relationship_.LATEST_VERSION_STATUS),
Relationship.LatestVersionStatus.LEFT_ONLY
)
);
// return the UUIDs of the left item
criteriaQuery.multiselect(
relationshipRoot.get(Relationship_.id).alias(relationshipIdAlias),
relationshipRoot.get(Relationship_.leftItem).get(Item_.id).alias(itemUuidAlias)
);
}
// all predicates are chained with the AND operator
criteriaQuery.where(predicates.toArray(new Predicate[]{}));
// deduplicate result
criteriaQuery.distinct(true);
// execute query
Query query = this.getHibernateSession(context).createQuery(criteriaQuery);
query.setHint("org.hibernate.cacheable", true);
List<?> resultList = query.getResultList();
// convert types
return resultList.stream()
.map(Tuple.class::cast)
.map(t -> new ItemUuidAndRelationshipId(
(UUID) t.get(itemUuidAlias),
(Integer) t.get(relationshipIdAlias)
))
.collect(Collectors.toList());
}
@Override
public List<Relationship> findByTypeName(Context context, String typeName)
throws SQLException {
@@ -228,24 +373,26 @@ public class RelationshipDAOImpl extends AbstractHibernateDAO<Relationship> impl
}
@Override
public int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType,
boolean isLeft) throws SQLException {
public int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException {
CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context);
CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Relationship.class);
Root<Relationship> relationshipRoot = criteriaQuery.from(Relationship.class);
criteriaQuery.select(relationshipRoot);
if (isLeft) {
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType),
relationshipType),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.leftItem), item));
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getLeftItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
} else {
criteriaQuery
.where(criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType),
relationshipType),
criteriaBuilder.equal(relationshipRoot.get(Relationship_.rightItem), item));
criteriaQuery.where(
criteriaBuilder.equal(relationshipRoot.get(Relationship_.relationshipType), relationshipType),
getRightItemPredicate(criteriaBuilder, relationshipRoot, item, false, excludeNonLatest)
);
}
return count(context, criteriaQuery, criteriaBuilder, relationshipRoot);
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.dao.pojo;
import java.util.UUID;
import org.dspace.content.Relationship;
import org.dspace.content.dao.RelationshipDAO;
import org.springframework.lang.NonNull;
/**
* Used by {@link RelationshipDAO#findByLatestItemAndRelationshipType} to avoid creating {@link Relationship}s.
*/
public class ItemUuidAndRelationshipId {
private final UUID itemUuid;
private final int relationshipId;
public ItemUuidAndRelationshipId(@NonNull UUID itemUuid, int relationshipId) {
this.itemUuid = itemUuid;
this.relationshipId = relationshipId;
}
public UUID getItemUuid() {
return this.itemUuid;
}
public int getRelationshipId() {
return this.relationshipId;
}
}

View File

@@ -21,6 +21,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
@@ -111,8 +112,22 @@ public interface ItemService
* @return an iterator over the items in the archive.
* @throws SQLException if database error
*/
@Deprecated
public Iterator<Item> findAllUnfiltered(Context context) throws SQLException;
/**
* Find all items that are:
* - NOT in the workspace
* - NOT in the workflow
* - NOT a template item for e.g. a collection
*
* This implies that the result also contains older versions of items and withdrawn items.
* @param context the DSpace context.
* @return iterator over all regular items.
* @throws SQLException if database error.
*/
public Iterator<Item> findAllRegularItems(Context context) throws SQLException;
/**
* Find all the items in the archive by a given submitter. The order is
* indeterminate. Only items with the "in archive" flag set are included.
@@ -813,12 +828,20 @@ public interface ItemService
*/
public List<MetadataValue> getMetadata(Item item, String schema, String element, String qualifier,
String lang, boolean enableVirtualMetadata);
/**
* Returns the item's entity type, if any.
*
* @param item the item
* @return the entity type as string, if any
* Retrieve the label of the entity type of the given item.
* @param item the item.
* @return the label of the entity type, taken from the item metadata, or null if not found.
*/
public String getEntityType(Item item);
public String getEntityTypeLabel(Item item);
/**
* Retrieve the entity type of the given item.
* @param context the DSpace context.
* @param item the item.
* @return the entity type of the given item, or null if not found.
*/
public EntityType getEntityType(Context context, Item item) throws SQLException;
}
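/*
 * Hedged usage sketch (the helper class below is an assumption for illustration, not DSpace code):
 * resolve an item's EntityType via the context-aware lookup documented above, falling back to the
 * raw metadata label when no EntityType entity exists for that item.
 */
class EntityTypeLookupSketch {
    private final org.dspace.content.service.ItemService itemService;

    EntityTypeLookupSketch(org.dspace.content.service.ItemService itemService) {
        this.itemService = itemService;
    }

    String describe(org.dspace.core.Context context, org.dspace.content.Item item)
        throws java.sql.SQLException {
        org.dspace.content.EntityType entityType = itemService.getEntityType(context, item);
        if (entityType != null) {
            return entityType.getLabel();
        }
        // fall back to the dspace.entity.type metadata value (may be null if the item has none)
        return itemService.getEntityTypeLabel(item);
    }
}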

View File

@@ -14,7 +14,9 @@ import java.util.UUID;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
import org.dspace.content.RelationshipType;
import org.dspace.content.dao.pojo.ItemUuidAndRelationshipId;
import org.dspace.core.Context;
import org.dspace.service.DSpaceCRUDService;
@@ -49,6 +51,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
List<Relationship> findByItem(Context context, Item item, Integer limit, Integer offset, boolean excludeTilted)
throws SQLException;
/**
* Retrieves the list of Relationships currently in the system for which the given Item is either
* a leftItem or a rightItem object
* @param context The relevant DSpace context
* @param item The Item that has to be the left or right item for the relationship to be
* included in the list
* @param limit paging limit
* @param offset paging offset
* @param excludeTilted If true, excludes tilted relationships
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of relationships for which each relationship adheres to the above
* listed constraint
* @throws SQLException If something goes wrong
*/
List<Relationship> findByItem(
Context context, Item item, Integer limit, Integer offset, boolean excludeTilted, boolean excludeNonLatest
) throws SQLException;
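/*
 * Hedged usage sketch (hypothetical helper, not DSpace code): fetch every relationship of an item,
 * including tilted relationships and relationships whose other item is not the latest version,
 * e.g. for an administrative overview. Only the service method documented above is real API.
 */
class AllRelationshipsSketch {
    private final org.dspace.content.service.RelationshipService relationshipService;

    AllRelationshipsSketch(org.dspace.content.service.RelationshipService relationshipService) {
        this.relationshipService = relationshipService;
    }

    java.util.List<org.dspace.content.Relationship> listAll(
        org.dspace.core.Context context, org.dspace.content.Item item
    ) throws java.sql.SQLException {
        // limit/offset of -1 disable paging; both exclude flags are turned off
        return relationshipService.findByItem(context, item, -1, -1, false, false);
    }
}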
/**
* Retrieves the full list of relationships currently in the system
* @param context The relevant DSpace context
@@ -79,30 +100,54 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
public Relationship create(Context context, Relationship relationship) throws SQLException, AuthorizeException;
/**
* This method returns the next leftplace integer to use for a relationship with this item as the leftItem
* Move the given relationship to a new leftPlace and/or rightPlace.
*
* @param context The relevant DSpace context
* @param item The item that has to be the leftItem of a relationship for it to qualify
* @return The next integer to be used for the leftplace of a relationship with the given item
* as a left item
* @throws SQLException If something goes wrong
* This will
* 1. verify whether the move is authorized
* 2. move the relationship to the specified left/right place
* 3. update the left/right place of other relationships and/or metadata in order to resolve the move without
* leaving any gaps
*
* At least one of the new places should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftPlace The value to set the leftPlace of this Relationship to
* @param newRightPlace The value to set the rightPlace of this Relationship to
* @return The moved relationship with updated place variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/
int findNextLeftPlaceByLeftItem(Context context, Item item) throws SQLException;
Relationship move(Context context, Relationship relationship, Integer newLeftPlace, Integer newRightPlace)
throws SQLException, AuthorizeException;
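/*
 * Hedged usage sketch (hypothetical helper, not DSpace code): append a relationship at the end of
 * its left item's list; per the documentation above, -1 resolves to the last place and null leaves
 * the right place untouched.
 */
class MoveToEndOfLeftListSketch {
    private final org.dspace.content.service.RelationshipService relationshipService;

    MoveToEndOfLeftListSketch(org.dspace.content.service.RelationshipService relationshipService) {
        this.relationshipService = relationshipService;
    }

    org.dspace.content.Relationship moveToEnd(org.dspace.core.Context context,
                                              org.dspace.content.Relationship relationship)
        throws java.sql.SQLException, org.dspace.authorize.AuthorizeException {
        Integer newLeftPlace = -1;    // resolved to the end of the left item's list
        Integer newRightPlace = null; // right place stays as it is
        return relationshipService.move(context, relationship, newLeftPlace, newRightPlace);
    }
}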
/**
* This method returns the next rightplace integer to use for a relationship with this item as the rightItem
* Move the given relationship to a new leftItem and/or rightItem.
*
* @param context The relevant DSpace context
* @param item The item that has to be the rightitem of a relationship for it to qualify
* @return The next integer to be used for the rightplace of a relationship with the given item
* as a right item
* @throws SQLException If something goes wrong
* This will
* 1. move the relationship to the last place in its current left or right Item. This ensures that we don't leave
* any gaps when moving the relationship to a new Item.
* If only one of the relationship's Items is changed, the order of relationships and metadata in the other
* will not be affected
* 2. insert the relationship into the new Item(s)
*
* At least one of the new Items should be non-null, otherwise no changes will be made.
*
* @param context The relevant DSpace context
* @param relationship The Relationship to move
* @param newLeftItem The value to set the leftItem of this Relationship to
* @param newRightItem The value to set the rightItem of this Relationship to
* @return The moved relationship with updated left/right Items variables
* @throws SQLException If something goes wrong
* @throws AuthorizeException If the user is not authorized to update the Relationship or its Items
*/
int findNextRightPlaceByRightItem(Context context, Item item) throws SQLException;
Relationship move(Context context, Relationship relationship, Item newLeftItem, Item newRightItem)
throws SQLException, AuthorizeException;
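/*
 * Hedged usage sketch (hypothetical helper, not DSpace code): re-attach a relationship to a new
 * left item while keeping the current right item; per the documentation above, passing null keeps
 * that side unchanged.
 */
class MoveToOtherItemSketch {
    private final org.dspace.content.service.RelationshipService relationshipService;

    MoveToOtherItemSketch(org.dspace.content.service.RelationshipService relationshipService) {
        this.relationshipService = relationshipService;
    }

    org.dspace.content.Relationship moveLeftItem(org.dspace.core.Context context,
                                                 org.dspace.content.Relationship relationship,
                                                 org.dspace.content.Item newLeftItem)
        throws java.sql.SQLException, org.dspace.authorize.AuthorizeException {
        // the right item is left unchanged; the relationship is inserted into newLeftItem's list
        return relationshipService.move(context, relationship, newLeftItem, (org.dspace.content.Item) null);
    }
}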
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -117,6 +162,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -131,6 +177,24 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, int limit, int offset, boolean excludeNonLatest
) throws SQLException;
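As a hedged illustration of the paging parameters above, a caller could walk all relationships of one type for an item while hiding those that only exist on outdated versions of the other item (page size 20 is arbitrary):
// Sketch: page through relationships of a single type, excluding non-latest counterparts.
int offset = 0;
List<Relationship> page;
do {
    page = relationshipService.findByItemAndRelationshipType(
        context, item, relationshipType, 20, offset, true);
    // ... process the current page ...
    offset += page.size();
} while (!page.isEmpty());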
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
@@ -145,17 +209,51 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
throws SQLException;
/**
* This method will update the place for the Relationship and all other relationships found by the items and
* relationship type of the given Relationship. It will give this Relationship the last place in both the
* left and right place determined by querying for the list of leftRelationships and rightRelationships
* by the leftItem, rightItem and relationshipType of the given Relationship.
* @param context The relevant DSpace context
* @param relationship The Relationship object that will have its place updated and that will be used
* to retrieve the other relationships whose place might need to be updated
* @throws SQLException If something goes wrong
*/
public void updatePlaceInRelationship(Context context, Relationship relationship)
throws SQLException, AuthorizeException;
/**
* This method returns a list of Relationships for which the leftItem or rightItem is equal to the given
* Item object and for which the RelationshipType object is equal to the relationshipType property
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param item The Item object to be matched on the leftItem or rightItem for the relationship
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param isLeft Is the item left or right
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return The list of Relationship objects that have the given Item object as leftItem or rightItem and
* for which the relationshipType property is equal to the given RelationshipType
* @throws SQLException If something goes wrong
*/
public List<Relationship> findByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, int limit, int offset,
boolean excludeNonLatest
) throws SQLException;
/**
* This method returns the UUIDs of all items that have a relationship with the given item, from the perspective
* of the other item. In other words, given a relationship with the given item, the given item should have
* "latest status" in order for the other item uuid to be returned.
*
* This method differs from the "excludeNonLatest" property in other methods,
* because in this method the current item should have "latest status" to return the other item,
* whereas with "excludeNonLatest" the other item should have "latest status" to be returned.
*
* This method is used to index items in solr; when searching for related items of one of the returned uuids,
* the given item should appear as a search result.
*
* NOTE: This method does not return {@link Relationship}s for performance, because doing so would eagerly fetch
* the items on both sides, which is unnecessary.
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type.
* @param context the DSpace context.
* @param latestItem the target item; only relationships where this item has "latest status" should be considered.
* @param relationshipType the relationship type for which relationships should be selected.
* @param isLeft whether the entity type of the item occurs on the left or right side of the relationship type.
* This is redundant in most cases, but necessary because relationship types may have
* the same entity type on both sides.
* @return a list containing pairs of relationship ids and item uuids.
* @throws SQLException if something goes wrong.
*/
public List<ItemUuidAndRelationshipId> findByLatestItemAndRelationshipType(
Context context, Item latestItem, RelationshipType relationshipType, boolean isLeft
) throws SQLException;
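The method returns lightweight id/uuid pairs instead of full Relationships; a sketch of how an indexer might consume them (the pair's accessor name and the solrDoc variable are assumptions for illustration):
// Sketch: record the uuids of related items so that searches scoped to those uuids surface this item.
List<ItemUuidAndRelationshipId> pairs = relationshipService
    .findByLatestItemAndRelationshipType(context, latestItem, relationshipType, true);
for (ItemUuidAndRelationshipId pair : pairs) {
    // getItemUuid() is an assumed accessor on the pair object
    solrDoc.addField("relation." + relationshipType.getLeftwardType(), pair.getItemUuid());
}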
/**
* This method will update the given item's metadata order.
@@ -174,6 +272,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @return The list of Relationship objects for which the given RelationshipType object is equal
@@ -185,6 +284,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
* @param relationshipType The RelationshipType object that will be used to check the Relationship on
* @param limit paging limit
@@ -198,6 +298,27 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* This method is used to construct a Relationship object with all its variables
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
* @param relationshipType The RelationshipType object for the relationship
* @param leftPlace The leftPlace integer for the relationship
* @param rightPlace The rightPlace integer for the relationship
* @param leftwardValue The leftwardValue string for the relationship
* @param rightwardValue The rightwardValue string for the relationship
* @param latestVersionStatus The latestVersionStatus value for the relationship
* @return The created Relationship object with the given properties
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue, LatestVersionStatus latestVersionStatus
) throws AuthorizeException, SQLException;
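A sketch of the new overload; the LatestVersionStatus constant name, the -1 place convention, and the variable names are assumptions used only for illustration:
// Sketch: create a relationship where both sides are treated as the latest version.
Relationship rel = relationshipService.create(
    context, publication, author, isAuthorOfPublication,
    -1, -1,                        // -1 assumed to let the service assign the next left/right place
    null, null,                    // no leftward/rightward labels
    LatestVersionStatus.BOTH);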
/**
* This method is used to construct a Relationship object with all its variables,
* except the latest version status
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
@@ -210,14 +331,15 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
* @throws AuthorizeException If something goes wrong
* @throws SQLException If something goes wrong
*/
Relationship create(Context c, Item leftItem, Item rightItem, RelationshipType relationshipType,
int leftPlace, int rightPlace, String leftwardValue, String rightwardValue)
throws AuthorizeException, SQLException;
Relationship create(
Context c, Item leftItem, Item rightItem, RelationshipType relationshipType, int leftPlace, int rightPlace,
String leftwardValue, String rightwardValue
) throws AuthorizeException, SQLException;
/**
* This method is used to construct a Relationship object with all its variables,
* except the leftward and rightward labels
* except the leftward label, rightward label and latest version status
* @param c The relevant DSpace context
* @param leftItem The leftItem Item object for the relationship
* @param rightItem The rightItem Item object for the relationship
@@ -267,7 +389,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
/**
* Count total number of relationships (rows in relationship table) by a relationship type
*
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @return total count
@@ -287,10 +409,25 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
*/
int countByItem(Context context, Item item) throws SQLException;
/**
* This method returns a count of Relationship objects that have the given Item object
* as a leftItem or a rightItem
* @param context The relevant DSpace context
* @param item The item that should be either a leftItem or a rightItem of all
* the Relationship objects in the returned list
* @param excludeTilted if true, excludes tilted relationships
* @param excludeNonLatest if true, exclude relationships for which the opposite item is not the latest version
* that is relevant
* @return The number of Relationship objects that have the given item as either
* their left or their right item
* @throws SQLException If something goes wrong
*/
int countByItem(Context context, Item item, boolean excludeTilted, boolean excludeNonLatest) throws SQLException;
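A short sketch contrasting the two count flavours; the second call hides tilted relationships and relationships whose other item is not the latest relevant version:
// Sketch: total vs. "visible" relationship counts for one item.
int allRelationships = relationshipService.countByItem(context, item);
int visibleRelationships = relationshipService.countByItem(context, item, true, true);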
/**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not
*
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side or not
@@ -300,6 +437,22 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
int countByItemAndRelationshipType(Context context, Item item, RelationshipType relationshipType, boolean isLeft)
throws SQLException;
/**
* Count total number of relationships (rows in relationship table) by a relationship type and a boolean indicating
* whether the relationship should contain the item on the left side or not
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context context
* @param relationshipType relationship type to filter by
* @param isLeft Indicating whether the counted Relationships should have the given Item on the left side
* @param excludeNonLatest If true, excludes all relationships for which the other item has a more recent version
* that is relevant for this relationship
* @return total count with the given parameters
* @throws SQLException if database error
*/
int countByItemAndRelationshipType(
Context context, Item item, RelationshipType relationshipType, boolean isLeft, boolean excludeNonLatest
) throws SQLException;
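For paging UIs this count pairs naturally with the findByItemAndRelationshipType overload documented earlier; a minimal sketch using matching excludeNonLatest flags:
// Sketch: count only relationships whose other item is the latest relevant version,
// then fetch the first page of 20 with the same flags.
int total = relationshipService.countByItemAndRelationshipType(
    context, item, relationshipType, true, true);
List<Relationship> firstPage = relationshipService.findByItemAndRelationshipType(
    context, item, relationshipType, true, 20, 0, true);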
/**
* Count total number of relationships (rows in relationship table)
* by a relationship leftward or rightward typeName

View File

@@ -155,12 +155,11 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of distinct results as depicted by the CriteriaQuery and parameters
* @throws SQLException
*/
public List<T> list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults,
int offset) throws SQLException {
public List<T> list(
Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset
) throws SQLException {
criteriaQuery.distinct(true);
@SuppressWarnings("unchecked")
List<T> result = (List<T>) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
return result;
return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
}
/**
@@ -183,12 +182,12 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
* @return A list of results determined by the CriteriaQuery and parameters
* @throws SQLException
*/
public List<T> list(Context context, CriteriaQuery criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults,
int offset, boolean distinct) throws SQLException {
public List<T> list(
Context context, CriteriaQuery<T> criteriaQuery, boolean cacheable, Class<T> clazz, int maxResults, int offset,
boolean distinct
) throws SQLException {
criteriaQuery.distinct(distinct);
@SuppressWarnings("unchecked")
List<T> result = (List<T>) executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
return result;
return executeCriteriaQuery(context, criteriaQuery, cacheable, maxResults, offset);
}
/**

View File

@@ -538,6 +538,36 @@ public class Context implements AutoCloseable {
}
}
/**
* Rollback the current transaction with the database, without persisting any
* pending changes. The database connection is not closed and can be reused
* afterwards.
*
* <b>WARNING: After calling this method all previously fetched entities are
* "detached" (pending changes are not tracked anymore). You have to reload all
* entities you still want to work with manually after this method call (see
* {@link Context#reloadEntity(ReloadableEntity)}).</b>
*
* @throws SQLException When rolling back the transaction in the database fails.
*/
public void rollback() throws SQLException {
// If Context is no longer open/valid, just note that it has already been closed
if (!isValid()) {
log.info("rollback() was called on a closed Context object. No changes to abort.");
return;
}
try {
// Rollback ONLY if we have a database transaction, and it is NOT Read Only
if (!isReadOnly() && isTransactionAlive()) {
dbConnection.rollback();
reloadContextBoundEntities();
}
} finally {
events = null;
}
}
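A hedged usage sketch of the new rollback(): entities fetched before the call become detached, so they must be reloaded before further use:
// Sketch: abandon pending changes but keep the Context (and its database connection) usable.
context.rollback();
// item was fetched before rollback(), so it is now detached; reload it before touching it again
item = context.reloadEntity(item);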
/**
* Close the context, without committing any of the changes performed using
* this context. The database connection is freed. No exception is thrown if

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.discovery;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
@@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer {
// collect Items, Collections, Communities that need indexing
private Set<IndexableObject> objectsToUpdate = new HashSet<>();
// collect freshly created Items that need indexing (requires pre-db status)
private Set<IndexableObject> createdItemsToUpdate = new HashSet<>();
// unique search IDs to delete
private Set<String> uniqueIdsToDelete = new HashSet<>();
@@ -65,6 +68,7 @@ public class IndexEventConsumer implements Consumer {
if (objectsToUpdate == null) {
objectsToUpdate = new HashSet<>();
uniqueIdsToDelete = new HashSet<>();
createdItemsToUpdate = new HashSet<>();
}
int st = event.getSubjectType();
@@ -143,6 +147,7 @@ public class IndexEventConsumer implements Consumer {
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
uniqueIdsToDelete.add(detail);
}
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
}
break;
@@ -162,7 +167,7 @@ public class IndexEventConsumer implements Consumer {
// also update the object in order to index mapped/unmapped Items
if (subject != null &&
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
}
}
break;
@@ -209,23 +214,11 @@ public class IndexEventConsumer implements Consumer {
}
// update the changed Items not deleted because they were on create list
for (IndexableObject iu : objectsToUpdate) {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject()));
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
}
}
indexObject(ctx, iu, false);
}
// update the created Items with a pre-db status
for (IndexableObject iu : createdItemsToUpdate) {
indexObject(ctx, iu, true);
}
} finally {
if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) {
@@ -235,6 +228,27 @@ public class IndexEventConsumer implements Consumer {
// "free" the resources
objectsToUpdate.clear();
uniqueIdsToDelete.clear();
createdItemsToUpdate.clear();
}
}
}
private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject()));
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false, preDb);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
}
}
}

View File

@@ -9,7 +9,9 @@ package org.dspace.discovery;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Map;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context;
/**
@@ -30,6 +32,17 @@ public interface IndexingService {
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit) throws SQLException, SearchServiceException;
/**
* Index a given DSO
* @param context The DSpace Context
* @param dso The DSpace Object to index
* @param force Force update even if not stale
* @param commit Commit the changes
* @param preDb Add a "preDB" status to the index (only applicable to Items)
*/
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException;
void unIndexContent(Context context, IndexableObject dso)
throws SQLException, IOException;
@@ -62,4 +75,15 @@ public interface IndexingService {
void optimize() throws SearchServiceException;
void buildSpellCheck() throws SearchServiceException, IOException;
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
* @param uniqueIndexId The unique index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/
*/
void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String,Object> fieldModifier)
throws SolrServerException, IOException;
}
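A sketch of how a caller might use atomicUpdate with a Solr field-modifier map, following the atomic-update syntax linked in the javadoc; the "latestVersion" field is the one added to item documents elsewhere in this change, while indexingService and indexableItem are assumed to be available:
// Sketch: atomically flip a single field on an already-indexed object.
Map<String, Object> fieldModifier = new HashMap<>();
fieldModifier.put("set", false);   // "set" is the Solr atomic-update operation that replaces the field value
indexingService.atomicUpdate(context, indexableItem.getUniqueIndexID(), "latestVersion", fieldModifier);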

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import org.dspace.content.Item;
@@ -38,6 +39,7 @@ public interface SearchService {
DiscoverResult search(Context context, DiscoverQuery query)
throws SearchServiceException;
/**
* Convenient method to call @see #search(Context, DSpaceObject,
* DiscoverQuery, boolean) with includeWithdrawn=false
@@ -52,9 +54,22 @@ public interface SearchService {
DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
/**
* Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the
* results
*
* @param context DSpace context object
* @param dso a DSpace object to use as a scope of the search
* @param query the discovery query object
* @return an iterator iterating over all results from the search
* @throws SearchServiceException if search error
*/
Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset,
int max, String... filterquery);
int max, String... filterquery);
/**
* Transforms the given string field and value into a filter query

View File

@@ -20,6 +20,7 @@ import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowItem;
@@ -170,4 +171,10 @@ public class SearchUtils {
DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName);
result.add(configurationExtra);
}
public static DiscoverQueryBuilder getQueryBuilder() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager
.getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class);
}
}

View File

@@ -8,6 +8,8 @@
package org.dspace.discovery;
import static java.util.stream.Collectors.joining;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.io.PrintWriter;
@@ -118,8 +120,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
/**
* If the handle for the "dso" already exists in the index, and the "dso"
* has a lastModified timestamp that is newer than the document in the index
@@ -166,6 +166,24 @@ public class SolrServiceImpl implements SearchService, IndexingService {
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
}
/**
* Update the given indexable object using a given service
* @param context The DSpace Context
* @param indexableObjectService The service to index the object with
* @param indexableObject The object to index
* @param preDB Add a "preDB" status to the document
*/
protected void update(Context context, IndexFactory indexableObjectService, IndexableObject indexableObject,
boolean preDB) throws IOException, SQLException, SolrServerException {
if (preDB) {
final SolrInputDocument solrInputDocument =
indexableObjectService.buildNewDocument(context, indexableObject);
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
} else {
update(context, indexableObjectService, indexableObject);
}
}
/**
* unIndex removes an Item, Collection, or Community
*
@@ -454,6 +472,16 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
@Override
public void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String, Object> fieldModifier)
throws SolrServerException, IOException {
SolrInputDocument solrInputDocument = new SolrInputDocument();
solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId);
solrInputDocument.addField(field, fieldModifier);
solrSearchCore.getSolr().add(solrInputDocument);
}
// //////////////////////////////////
// Private
// //////////////////////////////////
@@ -710,16 +738,21 @@ public class SolrServiceImpl implements SearchService, IndexingService {
discoveryQuery.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof IndexableItem) {
discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso.
getUniqueIndexID());
getUniqueIndexID());
}
}
return search(context, discoveryQuery);
}
@Override
public Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException {
return new SearchIterator(context, dso, query);
}
@Override
public DiscoverResult search(Context context, DiscoverQuery discoveryQuery )
public DiscoverResult search(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException {
try {
if (solrSearchCore.getSolr() == null) {
@@ -733,6 +766,72 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
/**
* This class implements an iterator over items that is specifically used to iterate over search results
*/
private class SearchIterator implements Iterator<Item> {
private Context context;
private DiscoverQuery discoverQuery;
private DiscoverResult discoverResult;
private IndexableObject dso;
private int absoluteCursor;
private int relativeCursor;
private int pagesize;
SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException {
this.context = context;
this.discoverQuery = discoverQuery;
this.absoluteCursor = discoverQuery.getStart();
initialise();
}
SearchIterator(Context context, IndexableObject dso, DiscoverQuery discoverQuery)
throws SearchServiceException {
this.context = context;
this.dso = dso;
this.discoverQuery = discoverQuery;
initialise();
}
private void initialise() throws SearchServiceException {
this.relativeCursor = 0;
if (discoverQuery.getMaxResults() != -1) {
pagesize = discoverQuery.getMaxResults();
} else {
pagesize = 10;
}
discoverQuery.setMaxResults(pagesize);
this.discoverResult = search(context, dso, discoverQuery);
}
@Override
public boolean hasNext() {
return absoluteCursor < discoverResult.getTotalSearchResults();
}
@Override
public Item next() {
// paginate through the results of the discover query.
if (relativeCursor == pagesize) {
// get a new page of results when the last element of the previous page has been read
int offset = absoluteCursor;
// reset the position counter for getting element relativecursor on a page
relativeCursor = 0;
discoverQuery.setStart(offset);
try {
discoverResult = search(context, dso, discoverQuery);
} catch (SearchServiceException e) {
log.error("error while getting search results", e);
}
}
// get the element at position relativecursor on a page
IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor);
relativeCursor++;
absoluteCursor++;
return (Item) res.getIndexedObject();
}
}
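A minimal sketch of consuming iteratorSearch(), which is backed by the SearchIterator above; pagination is handled internally, so callers simply iterate (passing null as the scope to search the whole repository is an assumption here):
// Sketch: stream all Items matching a discover query without loading every page up front.
DiscoverQuery query = new DiscoverQuery();
query.setQuery("*:*");
Iterator<Item> items = searchService.iteratorSearch(context, null, query);
while (items.hasNext()) {
    Item item = items.next();
    // ... export or otherwise process the item ...
}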
protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException {
SolrQuery solrQuery = new SolrQuery();
@@ -753,6 +852,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
solrQuery.addField(STATUS_FIELD);
if (discoveryQuery.isSpellCheck()) {
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -903,11 +1003,14 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Enables solr to remove documents related to items not on database anymore (Stale)
// if maxAttemps is greater than 0 cleanup the index on each step
if (maxAttempts >= 0) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// avoid to process the response except if we are in the last allowed execution.
// When maxAttempts is 0 this will be just the first and last run as the
// executionCount is increased at the start of the loop it will be equals to 1
skipLoadingResponse = maxAttempts + 1 != executionCount;
Object statusObj = doc.getFirstValue(STATUS_FIELD);
if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// avoid processing the response unless we are in the last allowed execution.
// When maxAttempts is 0 this will be just the first and last run, as the
// executionCount is increased at the start of the loop it will be equal to 1
skipLoadingResponse = maxAttempts + 1 != executionCount;
}
}
continue;
}
@@ -1389,6 +1492,28 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
@Override
public void indexContent(Context context, IndexableObject indexableObject, boolean force,
boolean commit, boolean preDb) throws SearchServiceException, SQLException {
if (preDb) {
try {
final IndexFactory indexableObjectFactory = indexObjectServiceFactory.
getIndexableObjectFactory(indexableObject);
if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) {
update(context, indexableObjectFactory, indexableObject, true);
log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID()));
}
} catch (IOException | SQLException | SolrServerException | SearchServiceException e) {
log.error(e.getMessage(), e);
}
} else {
indexContent(context, indexableObject, force);
}
if (commit) {
commit();
}
}
@Override
public void commit() throws SearchServiceException {
try {

View File

@@ -70,6 +70,11 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
return doc;
}
@Override
public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException {
return buildDocument(context, indexableObject);
}
@Override
public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument)
throws SQLException, IOException, SolrServerException {

View File

@@ -10,7 +10,6 @@ package org.dspace.discovery.indexobject;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
@@ -43,7 +42,6 @@ import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.discovery.FullTextContentStreams;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
@@ -64,6 +62,9 @@ import org.dspace.handle.service.HandleService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.MultiFormatDateParser;
import org.dspace.util.SolrUtils;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.XmlWorkflowItemService;
import org.springframework.beans.factory.annotation.Autowired;
@@ -78,6 +79,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class);
public static final String VARIANTS_STORE_SEPARATOR = "###";
public static final String STORE_SEPARATOR = "\n|||\n";
public static final String STATUS_FIELD = "database_status";
public static final String STATUS_FIELD_PREDB = "predb";
@Autowired
@@ -96,11 +99,13 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
protected WorkflowItemIndexFactory workflowItemIndexFactory;
@Autowired
protected WorkspaceItemIndexFactory workspaceItemIndexFactory;
@Autowired
protected VersionHistoryService versionHistoryService;
@Override
public Iterator<IndexableItem> findAll(Context context) throws SQLException {
Iterator<Item> items = itemService.findAllUnfiltered(context);
Iterator<Item> items = itemService.findAllRegularItems(context);
return new Iterator<IndexableItem>() {
@Override
public boolean hasNext() {
@@ -139,6 +144,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
doc.addField("withdrawn", item.isWithdrawn());
doc.addField("discoverable", item.isDiscoverable());
doc.addField("lastModified", SolrUtils.getDateFormatter().format(item.getLastModified()));
doc.addField("latestVersion", isLatestVersion(context, item));
EPerson submitter = item.getSubmitter();
if (submitter != null) {
@@ -169,6 +175,51 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return doc;
}
/**
* Check whether the given item is the latest version.
* If the latest version cannot be determined because either the version history or the latest version is not present,
* assume the item is the latest version.
* @param context the DSpace context.
* @param item the item that should be checked.
* @return true if the item is the latest version, false otherwise.
*/
protected boolean isLatestVersion(Context context, Item item) throws SQLException {
VersionHistory history = versionHistoryService.findByItem(context, item);
if (history == null) {
// not all items have a version history
// if an item does not have a version history, it is by definition the latest version
return true;
}
// start with the very latest version of the given item (may still be in workspace)
Version latestVersion = versionHistoryService.getLatestVersion(context, history);
// find the latest version of the given item that is archived
while (latestVersion != null && !latestVersion.getItem().isArchived()) {
latestVersion = versionHistoryService.getPrevious(context, history, latestVersion);
}
// could not find an archived version of the given item
if (latestVersion == null) {
// this scenario should never happen, but let's err on the side of showing too many items vs. too few
// (see discovery.xml, a lot of discovery configs filter out all items that are not the latest version)
return true;
}
// sanity check
assert latestVersion.getItem().isArchived();
return item.equals(latestVersion.getItem());
}
@Override
public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem)
throws SQLException, IOException {
SolrInputDocument doc = buildDocument(context, indexableItem);
doc.addField(STATUS_FIELD, STATUS_FIELD_PREDB);
return doc;
}
@Override
public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item,
List<DiscoveryConfiguration> discoveryConfigurations)
@@ -713,26 +764,31 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
}
@Override
public List getIndexableObjects(Context context, Item object) throws SQLException {
List<IndexableObject> results = new ArrayList<>();
if (object.isArchived() || object.isWithdrawn()) {
// We only want to index an item as an item if it is not in workflow
results.addAll(Arrays.asList(new IndexableItem(object)));
} else {
// Check if we have a workflow / workspace item
final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, object);
if (workspaceItem != null) {
results.addAll(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem));
} else {
// Check if we have a workflow item
final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, object);
if (xmlWorkflowItem != null) {
results.addAll(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
}
public List getIndexableObjects(Context context, Item item) throws SQLException {
if (item.isArchived() || item.isWithdrawn()) {
// we only want to index an item as an item if it is not in workflow
return List.of(new IndexableItem(item));
}
return results;
final WorkspaceItem workspaceItem = workspaceItemService.findByItem(context, item);
if (workspaceItem != null) {
// a workspace item is linked to the given item
return List.copyOf(workspaceItemIndexFactory.getIndexableObjects(context, workspaceItem));
}
final XmlWorkflowItem xmlWorkflowItem = xmlWorkflowItemService.findByItem(context, item);
if (xmlWorkflowItem != null) {
// a workflow item is linked to the given item
return List.copyOf(workflowItemIndexFactory.getIndexableObjects(context, xmlWorkflowItem));
}
if (!isLatestVersion(context, item)) {
// the given item is an older version of another item
return List.of(new IndexableItem(item));
}
// nothing to index
return List.of();
}
@Override

View File

@@ -46,6 +46,14 @@ public interface IndexFactory<T extends IndexableObject, S> {
*/
SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException;
/**
* Create solr document with all the shared fields initialized.
* Can contain special fields required for "new" documents vs regular buildDocument
* @param indexableObject the indexableObject that we want to index
* @return initialized solr document
*/
SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException;
/**
* Write the provided document to the solr core
* @param context DSpace context object

View File

@@ -5,7 +5,7 @@
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.utils;
package org.dspace.discovery.utils;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
@@ -19,10 +19,6 @@ import java.util.Objects;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.query.SearchQueryConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.InvalidSearchRequestException;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context;
import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverFacetField;
@@ -32,6 +28,7 @@ import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration;
@@ -40,17 +37,11 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.discovery.configuration.DiscoverySortConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;
/**
* This class builds the queries for the /search and /facet endpoints.
*/
@Component
public class DiscoverQueryBuilder implements InitializingBean {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class);
@@ -74,51 +65,60 @@ public class DiscoverQueryBuilder implements InitializingBean {
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page)
throws DSpaceBadRequestException {
String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Integer pageSize, Long offset, String sortProperty,
String sortDirection) throws SearchServiceException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page);
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset,
sortProperty, sortDirection);
}
/**
* Build a discovery query
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/
public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page)
throws DSpaceBadRequestException {
String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Integer pageSize, Long offset, String sortProperty,
String sortDirection)
throws IllegalArgumentException, SearchServiceException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes);
dsoTypes);
//When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration);
//Configure pagination and sorting
configurePagination(page, queryArgs);
configureSorting(page, queryArgs, discoveryConfiguration.getSearchSortConfiguration());
configurePagination(pageSize, offset, queryArgs);
configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration());
addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs);
return queryArgs;
@@ -128,11 +128,11 @@ public class DiscoverQueryBuilder implements InitializingBean {
DiscoverQuery queryArgs) {
if (discoveryConfiguration.getHitHighlightingConfiguration() != null) {
List<DiscoveryHitHighlightFieldConfiguration> metadataFields = discoveryConfiguration
.getHitHighlightingConfiguration().getMetadataFields();
.getHitHighlightingConfiguration().getMetadataFields();
for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) {
queryArgs.addHitHighlightingField(
new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(),
fieldConfiguration.getSnippets()));
new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(),
fieldConfiguration.getSnippets()));
}
}
}
@@ -140,92 +140,97 @@ public class DiscoverQueryBuilder implements InitializingBean {
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param page the pageable for this discovery query
* @param facetName the facet field
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type
* @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
String dsoType, Pageable page, String facetName)
throws DSpaceBadRequestException {
String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Integer pageSize, Long offset, String facetName)
throws IllegalArgumentException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName);
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset,
facetName);
}
/**
* Create a discovery facet query.
*
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query
* @param facetName the facet field
* @param context the DSpace context
* @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types
* @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param facetName the facet field
*/
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName)
throws DSpaceBadRequestException {
String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Integer pageSize, Long offset, String facetName)
throws IllegalArgumentException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes);
//When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page);
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize);
//We don't want any search results, we only want facet values
queryArgs.setMaxResults(0);
//Configure pagination
configurePaginationForFacets(page, queryArgs);
configurePaginationForFacets(offset, queryArgs);
return queryArgs;
}
private void configurePaginationForFacets(Pageable page, DiscoverQuery queryArgs) {
if (page != null) {
queryArgs.setFacetOffset(Math.toIntExact(page.getOffset()));
private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) {
if (offset != null) {
queryArgs.setFacetOffset(Math.toIntExact(offset));
}
}
private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, String facetName, Pageable page)
throws DSpaceBadRequestException {
DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration,
String facetName, Integer pageSize)
throws IllegalArgumentException {
DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName);
if (facet != null) {
queryArgs.setFacetMinCount(1);
int pageSize = Math.min(pageSizeLimit, page.getPageSize());
pageSize = pageSize != null ? Math.min(pageSizeLimit, pageSize) : pageSizeLimit;
fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize);
} else {
throw new DSpaceBadRequestException(facetName + " is not a valid search facet");
throw new IllegalArgumentException(facetName + " is not a valid search facet");
}
return queryArgs;
}
private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) {
DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) {
if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) {
try {
FacetYearRange facetYearRange =
searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs);
searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs);
queryArgs.addYearRangeFacet(facet, facetYearRange);
@@ -241,18 +246,18 @@ public class DiscoverQueryBuilder implements InitializingBean {
int facetLimit = pageSize + 1;
//This should take care of the sorting for us
queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit,
facet.getSortOrderSidebar(), StringUtils.trimToNull(prefix)));
facet.getSortOrderSidebar(),
StringUtils.trimToNull(prefix)));
}
}
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query,
List<SearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException {
List<QueryBuilderSearchFilter> searchFilters, List<String> dsoTypes)
throws IllegalArgumentException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
//Add search filters
queryArgs.addFilterQueries(convertFilters(context, discoveryConfiguration, searchFilters));
queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters));
//Set search query
if (StringUtils.isNotBlank(query)) {
@@ -274,30 +279,17 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray(
new String[discoveryConfiguration.getDefaultFilterQueries()
.size()]));
new String[discoveryConfiguration
.getDefaultFilterQueries()
.size()]));
return queryArgs;
}
private void configureSorting(Pageable page, DiscoverQuery queryArgs,
DiscoverySortConfiguration searchSortConfiguration) throws DSpaceBadRequestException {
String sortBy = null;
String sortOrder = null;
//Read the Pageable object if there is one
if (page != null) {
Sort sort = page.getSort();
if (sort != null && sort.iterator().hasNext()) {
Sort.Order order = sort.iterator().next();
sortBy = order.getProperty();
sortOrder = order.getDirection().name();
}
}
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new InvalidSearchRequestException(
"The field: " + sortBy + "is not configured for the configuration!");
}
private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs,
DiscoverySortConfiguration searchSortConfiguration)
throws IllegalArgumentException, SearchServiceException {
String sortBy = sortProperty;
String sortOrder = sortDirection;
//Load defaults if we did not receive values
if (sortBy == null) {
@@ -307,24 +299,30 @@ public class DiscoverQueryBuilder implements InitializingBean {
sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder);
}
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new SearchServiceException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Update Discovery query
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
.getSortFieldConfiguration(sortBy);
.getSortFieldConfiguration(sortBy);
if (sortFieldConfiguration != null) {
String sortField = searchService
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType());
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType());
if ("asc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc);
} else if ("desc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc);
} else {
throw new DSpaceBadRequestException(sortOrder + " is not a valid sort order");
throw new IllegalArgumentException(sortOrder + " is not a valid sort order");
}
} else {
throw new DSpaceBadRequestException(sortBy + " is not a valid sort field");
throw new IllegalArgumentException(sortBy + " is not a valid sort field");
}
}
@@ -334,7 +332,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) {
!searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
}
return sortOrder;
@@ -344,7 +342,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) {
!searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) {
return sortBy;
@@ -354,66 +352,31 @@ public class DiscoverQueryBuilder implements InitializingBean {
return sortBy;
}
private void configurePagination(Pageable page, DiscoverQuery queryArgs) {
if (page != null) {
queryArgs.setMaxResults(Math.min(pageSizeLimit, page.getPageSize()));
queryArgs.setStart(Math.toIntExact(page.getOffset()));
} else {
queryArgs.setMaxResults(pageSizeLimit);
queryArgs.setStart(0);
}
private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) {
queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit);
queryArgs.setStart(offset != null ? Math.toIntExact(offset) : 0);
}
private String getDsoType(String dsoType) throws DSpaceBadRequestException {
private String getDsoType(String dsoType) throws IllegalArgumentException {
for (IndexFactory indexFactory : indexableFactories) {
if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) {
return indexFactory.getType();
}
}
throw new DSpaceBadRequestException(dsoType + " is not a valid DSpace Object type");
throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type");
}
public void setIndexableFactories(List<IndexFactory> indexableFactories) {
this.indexableFactories = indexableFactories;
}
private String[] convertFilters(Context context, DiscoveryConfiguration discoveryConfiguration,
List<SearchFilter> searchFilters) throws DSpaceBadRequestException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
SearchQueryConverter searchQueryConverter = new SearchQueryConverter();
List<SearchFilter> transformedFilters = searchQueryConverter.convert(searchFilters);
try {
for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(transformedFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new DSpaceBadRequestException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new DSpaceBadRequestException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs,
DiscoveryConfiguration discoveryConfiguration) {
List<DiscoverySearchFilterFacet> facets = discoveryConfiguration.getSidebarFacets();
log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets
.size() : null));
.size() : null));
if (facets != null) {
queryArgs.setFacetMinCount(1);
@@ -427,4 +390,34 @@ public class DiscoverQueryBuilder implements InitializingBean {
return queryArgs;
}
private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration,
List<QueryBuilderSearchFilter> searchFilters)
throws IllegalArgumentException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
try {
for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new IllegalArgumentException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
}
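A minimal standalone sketch of the refactored pagination handling above; the pageSizeLimit value is an assumed example, the real limit is injected from configuration:

int pageSizeLimit = 100;   // assumed limit; in DiscoverQueryBuilder it comes from configuration
Integer size = 500;        // requested page size
Long offset = 40L;         // requested offset

int maxResults = (size != null) ? Math.min(pageSizeLimit, size) : pageSizeLimit;  // 100
int start = (offset != null) ? Math.toIntExact(offset) : 0;                       // 40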

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.utils.parameter;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
* Representation for a Discovery search filter
*/
public class QueryBuilderSearchFilter {
private String name;
private String operator;
private String value;
public QueryBuilderSearchFilter(final String name, final String operator, final String value) {
this.name = name;
this.operator = operator;
this.value = value;
}
public String getName() {
return name;
}
public String getOperator() {
return operator;
}
public String getValue() {
return value;
}
public String toString() {
return "QueryBuilderSearchFilter{" +
"name='" + name + '\'' +
", operator='" + operator + '\'' +
", value='" + value + '\'' +
'}';
}
public boolean equals(Object object) {
if (object instanceof QueryBuilderSearchFilter) {
QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object;
if (!StringUtils.equals(obj.getName(), getName())) {
return false;
}
if (!StringUtils.equals(obj.getOperator(), getOperator())) {
return false;
}
if (!StringUtils.equals(obj.getValue(), getValue())) {
return false;
}
return true;
}
return false;
}
public int hashCode() {
return Objects.hash(name, operator, value);
}
}
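A minimal usage sketch of the value object above; the filter name, operator and value are hypothetical examples:

QueryBuilderSearchFilter titleFilter = new QueryBuilderSearchFilter("title", "contains", "open access");
QueryBuilderSearchFilter sameFilter = new QueryBuilderSearchFilter("title", "contains", "open access");
// equals() and hashCode() compare name, operator and value, so the two instances are interchangeable
boolean equal = titleFilter.equals(sameFilter);   // true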

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.eperson;
import static org.dspace.content.Item.ANY;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
@@ -23,6 +25,7 @@ import org.apache.commons.codec.DecoderException;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
@@ -30,6 +33,7 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObjectServiceImpl;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -43,6 +47,7 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event;
import org.dspace.util.UUIDUtils;
import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.dao.VersionDAO;
@@ -96,6 +101,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
protected VersionDAO versionDAO;
@Autowired(required = true)
protected ClaimedTaskService claimedTaskService;
@Autowired
protected OrcidTokenService orcidTokenService;
protected EPersonServiceImpl() {
super();
@@ -379,6 +386,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
group.getMembers().remove(ePerson);
}
orcidTokenService.deleteByEPerson(context, ePerson);
// Remove any subscriptions
subscribeService.deleteByEPerson(context, ePerson);
@@ -570,6 +579,15 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
return ePersonDAO.countRows(context);
}
@Override
public EPerson findByProfileItem(Context context, Item profile) throws SQLException {
List<MetadataValue> owners = itemService.getMetadata(profile, "dspace", "object", "owner", ANY);
if (CollectionUtils.isEmpty(owners)) {
return null;
}
return find(context, UUIDUtils.fromString(owners.get(0).getAuthority()));
}
@Override
public String getName(EPerson dso) {
return dso.getName();

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.service.DSpaceObjectLegacySupportService;
import org.dspace.content.service.DSpaceObjectService;
@@ -263,4 +264,16 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
int countTotal(Context context) throws SQLException;
/**
* Find the EPerson related to the given profile item. If the given item is not
* a profile item, null is returned.
*
* @param context The relevant DSpace Context.
* @param profile the profile item to search for
* @return the EPerson, if any
* @throws SQLException An exception that provides information on a database
* access error or other errors.
*/
EPerson findByProfileItem(Context context, Item profile) throws SQLException;
}
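A minimal sketch of how the new lookup could be used, assuming a Context and a profile Item are already available:

EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
EPerson owner = ePersonService.findByProfileItem(context, profileItem);
if (owner == null) {
    // the item is not a profile item, or its dspace.object.owner metadata is missing
}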

View File

@@ -140,7 +140,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath()));
externalDataObject
.addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null,
orcidUrl + person.getName().getPath()));
orcidUrl + "/" + person.getName().getPath()));
if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) {
externalDataObject.setDisplayValue(lastName + ", " + firstName);
externalDataObject.setValue(lastName + ", " + firstName);
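The added "/" ensures a separator between the configured base URL and the ORCID iD. For illustration, assuming orcidUrl is https://orcid.org and a sample iD:

// before: https://orcid.org0000-0002-1825-0097
// after:  https://orcid.org/0000-0002-1825-0097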

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class CiniiFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which incoming metadatum is mapped onto which DSpace metadatum. Note that while the key must be
* unique, it only matters here for postprocessing of the value. The mapped MetadatumContributor has full
* control over which metadata field is generated.
*
* @param metadataFieldMap The map containing the link between the retrieved metadata and the metadata that
*                         will be set on the item.
*/
@Override
@Resource(name = "ciniiMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,447 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.dspace.services.ConfigurationService;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Cinii
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String urlSearch;
@Autowired
private LiveImportClient liveImportClient;
@Autowired
private ConfigurationService configurationService;
@Override
public String getImportSource() {
return "cinii";
}
@Override
public void init() throws Exception {}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Cinii");
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
/**
* This class is a Callable implementation to get CiNii entries based on a
* query object.
*
* This Callable uses as query value the queryString passed to the constructor.
* If the object is constructed from a Query instance, the Query's map entry with key "query" is used.
* Pagination is supported as well, using the Query's map entries with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> records = new LinkedList<ImportRecord>();
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String queryString = query.getParameterAsClass("query", String.class);
String appId = configurationService.getProperty("cinii.appid");
List<String> ids = getCiniiIds(appId, count, null, null, null, start, queryString);
if (CollectionUtils.isNotEmpty(ids)) {
for (String id : ids) {
List<ImportRecord> tmp = search(id, appId);
if (CollectionUtils.isNotEmpty(tmp)) {
tmp.forEach(x -> x.addValue(createIdentifier(id)));
}
records.addAll(tmp);
}
}
return records;
}
}
/**
* This class is a Callable implementation to get a CiNii entry using a CiNii ID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
String appId = configurationService.getProperty("cinii.appid");
String id = query.getParameterAsClass("id", String.class);
List<ImportRecord> importRecord = search(id, appId);
if (CollectionUtils.isNotEmpty(importRecord)) {
importRecord.forEach(x -> x.addValue(createIdentifier(id)));
}
return importRecord;
}
}
/**
* This class is a Callable implementation to search CiNii entries
* using author, title and year.
* Pagination is supported too, using the value of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> records = new LinkedList<ImportRecord>();
String title = query.getParameterAsClass("title", String.class);
String author = query.getParameterAsClass("author", String.class);
Integer year = query.getParameterAsClass("year", Integer.class);
Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String appId = configurationService.getProperty("cinii.appid");
List<String> ids = getCiniiIds(appId, maxResult, author, title, year, start, null);
if (CollectionUtils.isNotEmpty(ids)) {
for (String id : ids) {
List<ImportRecord> importRecords = search(id, appId);
if (CollectionUtils.isNotEmpty(importRecords)) {
importRecords.forEach(x -> x.addValue(createIdentifier(id)));
}
records.addAll(importRecords);
}
}
return records;
}
}
/**
* This class is a Callable implementation to count the number
* of entries for a CiNii query.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
String appId = configurationService.getProperty("cinii.appid");
String queryString = query.getParameterAsClass("query", String.class);
return countCiniiElement(appId, null, null, null, null, null, queryString);
}
}
/**
* Get metadata by searching the CiNii RDF API with a CiNii NAID
*
* @param id CiNii NAID to search by
* @param appId registered application identifier for the API
* @return record metadata
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws HttpException Represents an XML/HTTP fault and provides access to the HTTP status code.
*/
protected List<ImportRecord> search(String id, String appId)
throws IOException, HttpException {
try {
List<ImportRecord> records = new LinkedList<ImportRecord>();
URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
records.add(transformSourceRecords(record));
}
return records;
} catch (URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
return root.getChildren();
} catch (JDOMException | IOException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Returns a list of URI links (for example: https://cir.nii.ac.jp/crid/123456789)
* to the searched CiNii articles
*
* @param appId Application ID
* @param maxResult The number of search results per page
* @param author Author name
* @param title Article name
* @param year Year of publication
* @param start Start number for the acquired search result list
* @param query Keyword to be searched
*/
private List<String> getCiniiIds(String appId, Integer maxResult, String author, String title,
Integer year, Integer start, String query) {
try {
List<String> ids = new ArrayList<>();
URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
uriBuilder.addParameter("format", "rss");
if (StringUtils.isNotBlank(appId)) {
uriBuilder.addParameter("appid", appId);
}
if (Objects.nonNull(maxResult) && maxResult != 0) {
uriBuilder.addParameter("count", maxResult.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("start", start.toString());
}
if (StringUtils.isNotBlank(title)) {
uriBuilder.addParameter("title", title);
}
if (StringUtils.isNotBlank(author)) {
uriBuilder.addParameter("author", author);
}
if (StringUtils.isNotBlank(query)) {
uriBuilder.addParameter("q", query);
}
if (Objects.nonNull(year) && year != -1 && year != 0) {
uriBuilder.addParameter("year_from", String.valueOf(year));
uriBuilder.addParameter("year_to", String.valueOf(year));
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
int url_len = this.url.length() - 1;
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"),
Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"));
XPathExpression<Attribute> xpath = XPathFactory.instance().compile("//ns:item/@rdf:about",
Filters.attribute(), null, namespaces);
List<Attribute> recordsList = xpath.evaluate(root);
for (Attribute item : recordsList) {
String value = item.getValue();
if (value.length() > url_len) {
ids.add(value.substring(url_len + 1));
}
}
return ids;
} catch (JDOMException | IOException | URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Returns the total number of CiNii articles returned by a specific query
*
* @param appId Application ID
* @param maxResult The number of search results per page
* @param author Author name
* @param title Article name
* @param year Year of publication
* @param start Start number for the acquired search result list
* @param query Keyword to be searched
*/
private Integer countCiniiElement(String appId, Integer maxResult, String author, String title,
Integer year, Integer start, String query) {
try {
URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
uriBuilder.addParameter("format", "rss");
uriBuilder.addParameter("appid", appId);
if (Objects.nonNull(maxResult) && maxResult != 0) {
uriBuilder.addParameter("count", maxResult.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("start", start.toString());
}
if (StringUtils.isNotBlank(title)) {
uriBuilder.addParameter("title", title);
}
if (StringUtils.isNotBlank(author)) {
uriBuilder.addParameter("author", author);
}
if (StringUtils.isNotBlank(query)) {
uriBuilder.addParameter("q", query);
}
if (Objects.nonNull(year) && year != -1 && year != 0) {
uriBuilder.addParameter("year_from", String.valueOf(year));
uriBuilder.addParameter("year_to", String.valueOf(year));
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays
.asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath = XPathFactory.instance().compile("//opensearch:totalResults",
Filters.element(), null, namespaces);
List<Element> nodes = xpath.evaluate(root);
if (nodes != null && !nodes.isEmpty()) {
return Integer.parseInt(nodes.get(0).getText());
}
return 0;
} catch (JDOMException | IOException | URISyntaxException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private MetadatumDTO createIdentifier(String id) {
MetadatumDTO metadatumDTO = new MetadatumDTO();
metadatumDTO.setSchema("dc");
metadatumDTO.setElement("identifier");
metadatumDTO.setQualifier("other");
metadatumDTO.setValue(id);
return metadatumDTO;
}
}
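A minimal usage sketch of the service above; the bean name and query are hypothetical, and the sketch assumes url, urlSearch and the cinii.appid property are configured:

CiniiImportMetadataSourceServiceImpl cinii = new DSpace().getServiceManager()
        .getServiceByName("CiniiImportService", CiniiImportMetadataSourceServiceImpl.class);
int total = cinii.getRecordsCount("machine learning");                              // count matching CiNii records
Collection<ImportRecord> firstPage = cinii.getRecords("machine learning", 0, 20);   // first 20 records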

View File

@@ -0,0 +1,173 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus-specific implementation of {@link MetadataContributor}.
* Responsible for generating the Scopus ID, ORCID, author name and affiliation
* from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor {
private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom");
private MetadataFieldConfig orcid;
private MetadataFieldConfig scopusId;
private MetadataFieldConfig authname;
private MetadataFieldConfig affiliation;
private Map<String, String> affId2affName = new HashMap<String, String>();
/**
* Retrieve the metadata associated with the given object.
* Depending on the retrieved node (using the query),
* different types of values will be added to the MetadatumDTO list.
*
* @param element The element to retrieve metadata from.
* @return A collection of MetadatumDTO values. Only the Scopus ID, ORCID, author name and affiliation
* of the found authors are added to the record.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<MetadatumDTO> metadatums = null;
fillAffillation(element);
try {
List<Element> nodes = element.getChildren("author", NAMESPACE);
for (Element el : nodes) {
metadatums = getMetadataOfAuthors(el);
if (Objects.nonNull(metadatums)) {
for (MetadatumDTO metadatum : metadatums) {
values.add(metadatum);
}
}
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
return values;
}
/**
* Retrieve the Scopus ID, ORCID, author name and affiliation
* metadata associated with the given element object.
* If the value retrieved from the element is empty,
* it is set to PLACEHOLDER_PARENT_METADATA_VALUE.
*
* @param element The element to retrieve metadata from
* @throws JaxenException If XPath evaluation failed
*/
private List<MetadatumDTO> getMetadataOfAuthors(Element element) throws JaxenException {
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
Element authname = element.getChild("authname", NAMESPACE);
Element scopusId = element.getChild("authid", NAMESPACE);
Element orcid = element.getChild("orcid", NAMESPACE);
Element afid = element.getChild("afid", NAMESPACE);
addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname));
addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId));
addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid));
// guard against authors without an <afid> element
addMetadatum(metadatums, getMetadata(Objects.nonNull(afid) && StringUtils.isNotBlank(afid.getValue())
? this.affId2affName.get(afid.getValue()) : null, this.affiliation));
return metadatums;
}
private void addMetadatum(List<MetadatumDTO> list, MetadatumDTO metadatum) {
if (Objects.nonNull(metadatum)) {
list.add(metadatum);
}
}
private String getElementValue(Element element) {
if (Objects.nonNull(element)) {
return element.getValue();
}
return StringUtils.EMPTY;
}
private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) {
if (StringUtils.isBlank(value)) {
return null;
}
MetadatumDTO metadata = new MetadatumDTO();
metadata.setElement(metadaConfig.getElement());
metadata.setQualifier(metadaConfig.getQualifier());
metadata.setSchema(metadaConfig.getSchema());
metadata.setValue(value);
return metadata;
}
private void fillAffillation(Element element) {
try {
List<Element> nodes = element.getChildren("affiliation", NAMESPACE);
for (Element el : nodes) {
fillAffiliation2Name(el);
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
}
private void fillAffiliation2Name(Element element) throws JaxenException {
Element affilationName = element.getChild("affilname", NAMESPACE);
Element affilationId = element.getChild("afid", NAMESPACE);
if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) {
affId2affName.put(affilationId.getValue(), affilationName.getValue());
}
}
public MetadataFieldConfig getAuthname() {
return authname;
}
public void setAuthname(MetadataFieldConfig authname) {
this.authname = authname;
}
public MetadataFieldConfig getOrcid() {
return orcid;
}
public void setOrcid(MetadataFieldConfig orcid) {
this.orcid = orcid;
}
public MetadataFieldConfig getScopusId() {
return scopusId;
}
public void setScopusId(MetadataFieldConfig scopusId) {
this.scopusId = scopusId;
}
public MetadataFieldConfig getAffiliation() {
return affiliation;
}
public void setAffiliation(MetadataFieldConfig affiliation) {
this.affiliation = affiliation;
}
}
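A sketch of the Scopus Atom fragment this contributor expects and the values it derives; the target metadata fields are configured externally and the ones shown are hypothetical:

// <author xmlns="http://www.w3.org/2005/Atom">
//     <authname>Doe J.</authname>
//     <authid>7004212771</authid>
//     <orcid>0000-0002-1825-0097</orcid>
//     <afid>60012345</afid>
// </author>
// with <affiliation><afid>60012345</afid><affilname>Example University</affilname></affiliation>
// contributeMetadata(...) would yield, for hypothetical field mappings:
//     authname    -> dc.contributor.author   = "Doe J."
//     scopusId    -> person.identifier.scopus = "7004212771"
//     orcid       -> person.identifier.orcid  = "0000-0002-1825-0097"
//     affiliation -> person.affiliation.name  = "Example University" (resolved via afid)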

View File

@@ -0,0 +1,110 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the Scopus startPage and endPage from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor {
private MetadataFieldConfig startPageMetadata;
private MetadataFieldConfig endPageMetadata;
/**
* Retrieve the metadata associated with the given Element object.
* Depending on the retrieved node (using the query),
* StartPage and EndPage values will be added to the MetadatumDTO list
*
* @param el The element to retrieve metadata from.
* @return A collection of MetadatumDTO values. Only the start page and end page
* of the found records are added to the record.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element el) {
List<MetadatumDTO> values = new LinkedList<>();
List<MetadatumDTO> metadatums = null;
for (String ns : prefixToNamespaceMapping.keySet()) {
List<Element> nodes = el.getChildren(query, Namespace.getNamespace(ns));
for (Element element : nodes) {
metadatums = getMetadatum(element.getValue());
if (Objects.nonNull(metadatums)) {
for (MetadatumDTO metadatum : metadatums) {
values.add(metadatum);
}
}
}
}
return values;
}
private List<MetadatumDTO> getMetadatum(String value) {
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
if (StringUtils.isBlank(value)) {
return null;
}
String [] range = value.split("-");
if (range.length == 2) {
metadatums.add(setStartPage(range));
metadatums.add(setEndPage(range));
} else if (range.length != 0) {
metadatums.add(setStartPage(range));
}
return metadatums;
}
private MetadatumDTO setEndPage(String[] range) {
MetadatumDTO endPage = new MetadatumDTO();
endPage.setValue(range[1]);
endPage.setElement(endPageMetadata.getElement());
endPage.setQualifier(endPageMetadata.getQualifier());
endPage.setSchema(endPageMetadata.getSchema());
return endPage;
}
private MetadatumDTO setStartPage(String[] range) {
MetadatumDTO startPage = new MetadatumDTO();
startPage.setValue(range[0]);
startPage.setElement(startPageMetadata.getElement());
startPage.setQualifier(startPageMetadata.getQualifier());
startPage.setSchema(startPageMetadata.getSchema());
return startPage;
}
public MetadataFieldConfig getStartPageMetadata() {
return startPageMetadata;
}
public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) {
this.startPageMetadata = startPageMetadata;
}
public MetadataFieldConfig getEndPageMetadata() {
return endPageMetadata;
}
public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) {
this.endPageMetadata = endPageMetadata;
}
}
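A brief illustration of the page-range splitting above:

// "305-318" -> start page "305", end page "318"
// "42"      -> start page "42" only (no end page is emitted)
String[] range = "305-318".split("-");   // range[0] = "305", range[1] = "318"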

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* This contributor replaces a specific character in the metadata value.
* It is useful for providers (e.g. Scopus) whose values contain the "/" character.
* The framework never encodes "/" when building URLs; likewise, if we
* pre-encode "/" as %2F, the framework encodes it again and it becomes %252F.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
*/
public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor {
private char characterToBeReplaced;
private char characterToReplaceWith;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
for (String ns : prefixToNamespaceMapping.keySet()) {
List<Element> nodes = element.getChildren(query, Namespace.getNamespace(ns));
for (Element el : nodes) {
values.add(getMetadatum(field, el.getValue()));
}
}
return values;
}
private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
MetadatumDTO dcValue = new MetadatumDTO();
if (Objects.isNull(field)) {
return null;
}
dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith));
dcValue.setElement(field.getElement());
dcValue.setQualifier(field.getQualifier());
dcValue.setSchema(field.getSchema());
return dcValue;
}
public void setCharacterToBeReplaced(int characterToBeReplaced) {
this.characterToBeReplaced = (char)characterToBeReplaced;
}
public void setCharacterToReplaceWith(int characterToReplaceWith) {
this.characterToReplaceWith = (char)characterToReplaceWith;
}
}
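A minimal configuration sketch; the replacement pair below (47 = '/', 45 = '-') is an assumed example:

ReplaceCharacterXPathMetadataContributor contributor = new ReplaceCharacterXPathMetadataContributor();
contributor.setCharacterToBeReplaced(47);    // '/'
contributor.setCharacterToReplaceWith(45);   // '-'
// a retrieved value such as "10.1000/xyz123" would be contributed as "10.1000-xyz123"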

View File

@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor is able to concatenate multiple values.
* Given a certain path, if it matches several nodes,
* the values of those nodes are concatenated into a single one.
* A concrete example can be seen in the file wos-responce.xml: the <abstract_text> node
* may contain several <p> paragraphs, and this contributor
* concatenates all <p> paragraphs to obtain a single value.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleConcatContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
StringBuilder text = new StringBuilder();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance()
        .compile(query, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
if (StringUtils.isNotBlank(element.getText())) {
text.append(element.getText());
}
} else {
log.warn("node of type: " + el.getClass());
}
}
if (StringUtils.isNotBlank(text.toString())) {
values.add(metadataFieldMapping.toDCValue(field, text.toString()));
}
return values;
}
}
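For illustration, the concatenation above behaves as follows for an abstract with several paragraphs:

// <abstract_text><p>First paragraph.</p><p>Second paragraph.</p></abstract_text>
// -> a single metadata value "First paragraph.Second paragraph."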

View File

@@ -0,0 +1,75 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web of Science specific implementation of {@link MetadataContributor}.
* This contributor can search over multiple paths.
* For example, to populate the subject metadata, the values in the Web of Science response
* are contained in different paths,
* so this contributor allows you to collect the values by configuring the paths in the paths list.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
private List<String> paths;
public SimpleMultiplePathContributor() {}
public SimpleMultiplePathContributor(List<String> paths) {
this.paths = paths;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
for (String path : this.paths) {
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
values.add(metadataFieldMapping.toDCValue(field, ((Element) el).getText()));
} else {
log.warn("node of type: " + el.getClass());
}
}
}
return values;
}
public List<String> getPaths() {
return paths;
}
public void setPaths(List<String> paths) {
this.paths = paths;
}
}
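A minimal configuration sketch; the XPath expressions are hypothetical examples of Web of Science paths:

SimpleMultiplePathContributor subjects = new SimpleMultiplePathContributor();
subjects.setPaths(List.of(
        "ns:static_data/ns:fullrecord_metadata/ns:category_info/ns:subjects/ns:subject",
        "ns:static_data/ns:fullrecord_metadata/ns:keywords/ns:keyword"));
// values found under either path are contributed to the same configured metadata field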

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* For each node returned by the supplied path, this contributor checks
* whether the node contains the supplied attribute; if it exists, the attribute value is taken.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com)
*/
public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
private String attribute;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
String attributeValue = element.getAttributeValue(this.attribute);
if (StringUtils.isNotBlank(attributeValue)) {
values.add(metadataFieldMapping.toDCValue(this.field, attributeValue));
}
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
public String getAttribute() {
return attribute;
}
public void setAttribute(String attribute) {
this.attribute = attribute;
}
}
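For illustration (the query and attribute name are hypothetical examples):

// query = "ns:identifier", attribute = "value"
// <identifier type="issn" value="1234-5678"/> -> contributed metadata value "1234-5678"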

View File

@@ -34,10 +34,10 @@ import org.springframework.beans.factory.annotation.Autowired;
*/
public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
protected MetadataFieldConfig field;
/**
* Return prefixToNamespaceMapping
*
@@ -171,4 +171,5 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
}
return values;
}
}
}

View File

@@ -0,0 +1,160 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web of Science specific implementation of {@link MetadataContributor}.
* For each node returned by the given path, this contributor checks whether the node contains "this.attribute"
* and whether the attribute value is one of the values configured
* in the "this.attributeValue2metadata" map; if so, the value of the current node is taken.
* If "this.firstChild" is true, the value of the node's child named "this.childName" is taken instead.
* The mapping and configuration of this class can be found in the wos-integration.xml file.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosAttribute2ValueContributor implements MetadataContributor<Element> {
private final static Logger log = LogManager.getLogger();
private String query;
private String attribute;
private boolean firstChild;
private String childName;
private Map<String, String> prefixToNamespaceMapping;
private Map<String, MetadataFieldConfig> attributeValue2metadata;
private MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
public WosAttribute2ValueContributor() {}
public WosAttribute2ValueContributor(String query,
Map<String, String> prefixToNamespaceMapping,
Map<String, MetadataFieldConfig> attributeValue2metadata) {
this.query = query;
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
this.attributeValue2metadata = attributeValue2metadata;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = (Element) el;
String attributeValue = element.getAttributeValue(this.attribute);
setField(attributeValue, element, values);
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
private void setField(String attributeValue, Element el, List<MetadatumDTO> values) {
for (String id : attributeValue2metadata.keySet()) {
if (StringUtils.equals(id, attributeValue)) {
if (this.firstChild) {
String value = el.getChild(this.childName).getValue();
values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), value));
} else {
values.add(metadataFieldMapping.toDCValue(attributeValue2metadata.get(id), el.getText()));
}
}
}
}
public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
return metadataFieldMapping;
}
public void setMetadataFieldMapping(
MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping;
}
@Resource(name = "isiFullprefixMapping")
public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
}
public Map<String, String> getPrefixToNamespaceMapping() {
return prefixToNamespaceMapping;
}
public String getAttribute() {
return attribute;
}
public void setAttribute(String attribute) {
this.attribute = attribute;
}
public Map<String, MetadataFieldConfig> getAttributeValue2metadata() {
return attributeValue2metadata;
}
public void setAttributeValue2metadata(Map<String, MetadataFieldConfig> attributeValue2metadata) {
this.attributeValue2metadata = attributeValue2metadata;
}
public String getQuery() {
return query;
}
public void setQuery(String query) {
this.query = query;
}
public boolean isFirstChild() {
return firstChild;
}
public void setFirstChild(boolean firstChild) {
this.firstChild = firstChild;
}
public String getChildName() {
return childName;
}
public void setChildName(String childName) {
this.childName = childName;
}
}
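A minimal configuration sketch, assuming MetadataFieldConfig offers a (schema, element, qualifier) constructor; the title types and target fields are hypothetical examples:

WosAttribute2ValueContributor titles = new WosAttribute2ValueContributor();
titles.setQuery("ns:static_data/ns:summary/ns:titles/ns:title");
titles.setAttribute("type");
Map<String, MetadataFieldConfig> map = new HashMap<>();
map.put("item", new MetadataFieldConfig("dc", "title", null));
map.put("source", new MetadataFieldConfig("dc", "relation", "ispartof"));
titles.setAttributeValue2metadata(map);
// <title type="item">Some article title</title> -> dc.title = "Some article title"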

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor can retrieve the identifiers
* configured in "this.identifier2field" from the Web of Science response.
* The mapping and configuration of this class can be found in the wos-integration.xml file.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosIdentifierContributor extends SimpleXpathMetadatumContributor {
protected Map<String, MetadataFieldConfig> identifier2field;
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Element> xpath =
XPathFactory.instance().compile(query, Filters.element(), null, namespaces);
List<Element> nodes = xpath.evaluate(element);
for (Element el : nodes) {
String type = el.getAttributeValue("type");
setIdentyfier(type, el, values);
}
return values;
}
private void setIdentyfier(String type, Element el, List<MetadatumDTO> values) {
for (String id : identifier2field.keySet()) {
if (StringUtils.equals(id, type)) {
String value = el.getAttributeValue("value");
values.add(metadataFieldMapping.toDCValue(identifier2field.get(id), value));
}
}
}
public Map<String, MetadataFieldConfig> getIdentifier2field() {
return identifier2field;
}
public void setIdentifier2field(Map<String, MetadataFieldConfig> identifier2field) {
this.identifier2field = identifier2field;
}
}
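For illustration (the identifier types and target fields are hypothetical examples):

// identifier2field = { "doi" -> dc.identifier.doi, "issn" -> dc.relation.issn }
// <identifier type="doi" value="10.1000/xyz123"/> -> dc.identifier.doi = "10.1000/xyz123"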

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* Web Of Science specific implementation of {@link MetadataContributor}
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor {
private final static Logger log = LogManager.getLogger();
@Override
public Collection<MetadatumDTO> contributeMetadata(Element t) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {
Element element = ((Element) el).getChild("name");
if (Objects.nonNull(element)) {
String type = element.getAttributeValue("role");
setIdentyfier(type, element, values);
}
} else {
log.warn("node of type: " + el.getClass());
}
}
return values;
}
private void setIdentyfier(String type, Element el, List<MetadatumDTO> values) {
if (StringUtils.equals("researcher_id", type)) {
String value = el.getAttributeValue("r_id");
if (StringUtils.isNotBlank(value)) {
values.add(metadataFieldMapping.toDCValue(this.field, value));
}
}
}
}
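For illustration of the ResearcherID extraction above (the sample values are hypothetical):

// <contributor><name role="researcher_id" r_id="A-1234-2008">Doe, Jane</name></contributor>
// -> the configured field receives the value "A-1234-2008"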

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the PubmedEurope metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which incoming metadatum is mapped onto which DSpace metadatum. Note that while the key must be
* unique, it only matters here for postprocessing of the value. The mapped MetadatumContributor has full
* control over which metadata field is generated.
*
* @param metadataFieldMap The map containing the link between the retrieved metadata and the metadata that
*                         will be set on the item.
*/
@Override
@Resource(name = "pubmedEuropeMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,419 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
/**
* Implements a data source for querying PubMed Europe
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
@Autowired
private LiveImportClient liveImportClient;
@Override
public String getImportSource() {
return "pubmedeu";
}
/**
* Get a single record from PubMed Europe.
*
* @param id Identifier for the record
* @return The first matching record
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
/**
* Find the number of records matching a query.
*
* @param query a query string to base the search on.
* @return the sum of the matching records over this import source
* @throws MetadataSourceException if the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Find the number of records matching a query.
*
* @param query A query string to base the search on.
* @return The sum of the matching records over this import source
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
/**
* Find records based on an object query.
*
* @param query A query object to base the search on.
* @return A set of records. Fully transformed.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
/**
* Get a single record from PubMed Europe.
*
* @param query A query matching a single record
* @return The first matching record
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
/**
* Find records based on a query object.
*
* @param query A query object to base the search on.
* @return A collection of import records.
* @throws MetadataSourceException If the underlying methods throw any exception.
*/
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for PubMed Europe");
}
@Override
public void init() throws Exception {}
public List<ImportRecord> getByPubmedEuropeID(String pubmedID, Integer start, Integer size)
throws IOException, HttpException {
String query = "(EXT_ID:" + pubmedID + ")";
return search(query, size < 1 ? 1 : size, start);
}
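// Illustrative usage (hypothetical identifier): getByPubmedEuropeID("12345678", 0, 25)
// issues the query string "(EXT_ID:12345678)" and returns at most 25 transformed records.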
/**
* This class is a Callable implementation to get PubMed Europe entries based on a
* query object.
*
* This Callable uses the string queryString passed to the constructor as the query value.
* If the object is constructed from a Query instance, the Query's map entry with key "query" is used.
* Pagination is supported as well, using the values of the Query's map entries with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String queryString = query.getParameterAsClass("query", String.class);
return search(queryString, count, start);
}
}
/**
* This class is a Callable implementation to get a PubMed Europe entry using a PubMed Europe ID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 ,0);
}
}
/**
* This class is a Callable implementation to search PubMed Europe entries
* using author, title and year.
* Pagination is supported too, using the value of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
String title = query.getParameterAsClass("title", String.class);
String author = query.getParameterAsClass("author", String.class);
Integer year = query.getParameterAsClass("year", Integer.class);
Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
return search(title, author, year, maxResult, start);
}
}
/**
* This class is a Callable implementation to count the number
* of entries for a PubMed Europe query.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
try {
return count(query.getParameterAsClass("query", String.class));
} catch (Exception e) {
throw new RuntimeException(e.getMessage(), e);
}
}
}
/**
* Returns the total number of PubMed Europe publications returned by a specific query
*
* @param query A keyword or combination of keywords to be searched
* @throws URISyntaxException If URI syntax error
* @throws ClientProtocolException The client protocol exception
* @throws IOException If IO error
* @throws JaxenException If Xpath evaluation failed
*/
public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException {
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
Element element = root.getChild("hitCount");
return Integer.parseInt(element.getValue());
} catch (JDOMException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
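// Illustrative response shape (abridged, assuming the Europe PMC REST search response format):
// <responseWrapper>
//   <hitCount>42</hitCount>
//   <nextCursorMark>...</nextCursorMark>
//   <resultList><result>...</result></resultList>
// </responseWrapper>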
public List<ImportRecord> search(String title, String author, int year, int count, int start)
    throws IOException {
    StringBuffer query = new StringBuffer();
    query.append("(");
    if (StringUtils.isNotBlank(title)) {
        query.append("(TITLE:").append(title).append(")");
    }
    if (StringUtils.isNotBlank(author)) {
        // Search for a surname and (optionally) initial(s) in publication author lists
        // AUTH:einstein, AUTH:"Smith AB"
        String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
        String[] authors = author.split(splitRegex);
        if (query.length() > 1) {
            query.append(" AND ");
        }
        query.append("(");
        int countAuthors = 0;
        for (String auth : authors) {
            countAuthors++;
            query.append("AUTH:\"").append(auth).append("\"");
            if (countAuthors < authors.length) {
                query.append(" AND ");
            }
        }
        query.append(")");
    }
    if (year != -1) {
        if (query.length() > 1) {
            query.append(" AND ");
        }
        query.append("(PUB_YEAR:").append(year).append(")");
    }
    query.append(")");
    return search(query.toString(), count, start);
}
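// Illustrative example (hypothetical values): search("Relativity", "Einstein A", 1916, 20, 0)
// builds the query ((TITLE:Relativity) AND (AUTH:"Einstein" AND AUTH:"A") AND (PUB_YEAR:1916))
// and delegates to the String-based search method below.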
/**
* Returns a list of PubMed Europe publication records
*
* @param query A keyword or combination of keywords to be searched
* @param size The number of search results per page
* @param start Start number for the acquired search result list
* @throws IOException If IO error
*/
public List<ImportRecord> search(String query, Integer size, Integer start) throws IOException {
List<ImportRecord> results = new ArrayList<>();
try {
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("format", "xml");
uriBuilder.addParameter("resulttype", "core");
uriBuilder.addParameter("pageSize", String.valueOf(size));
uriBuilder.addParameter("query", query);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
boolean lastPage = false;
int skipped = 0;
while (!lastPage && results.size() < size) {
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
String cursorMark = StringUtils.EMPTY;
if (StringUtils.isNotBlank(response)) {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
XPathFactory xpfac = XPathFactory.instance();
XPathExpression<Element> xPath = xpfac.compile("//responseWrapper/resultList/result",
Filters.element());
List<Element> records = xPath.evaluate(document);
if (records.size() > 0) {
for (Element item : records) {
if (start > skipped) {
skipped++;
} else {
results.add(transformSourceRecords(item));
}
}
} else {
lastPage = true;
break;
}
Element root = document.getRootElement();
Element nextCursorMark = root.getChild("nextCursorMark");
cursorMark = Objects.nonNull(nextCursorMark) ? nextCursorMark.getValue() : StringUtils.EMPTY;
}
if (StringUtils.isNotBlank(cursorMark)) {
uriBuilder.setParameter("cursorMark", cursorMark);
} else {
lastPage = true;
}
}
} catch (URISyntaxException | JDOMException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
return results;
}
private String buildURI(Integer pageSize, String query) throws URISyntaxException {
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("format", "xml");
uriBuilder.addParameter("resulttype", "core");
uriBuilder.addParameter("pageSize", String.valueOf(pageSize));
uriBuilder.addParameter("query", query);
return uriBuilder.toString();
}
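// Illustrative URI (assuming this.url points at the Europe PMC REST search endpoint):
//   <url>?format=xml&resulttype=core&pageSize=1&query=(EXT_ID:12345678)
// Parameter values are URL-encoded by URIBuilder.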
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class ScopusFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to
* the item.
*/
@Override
@Resource(name = "scopusMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Scopus
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com)
*/
public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private int timeout = 1000;
int itemPerPage = 25;
private String url;
private String apiKey;
private String instKey;
private String viewMode;
@Autowired
private LiveImportClient liveImportClient;
public LiveImportClient getLiveImportClient() {
return liveImportClient;
}
public void setLiveImportClient(LiveImportClient liveImportClient) {
this.liveImportClient = liveImportClient;
}
@Override
public void init() throws Exception {}
/**
* The string that identifies this import implementation. Preferably a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "scopus";
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query)).size();
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString())).size();
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start,
int count) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query));
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = null;
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
if (isEID(query.toString())) {
records = retry(new FindByIdCallable(query.toString()));
} else {
records = retry(new SearchByQueryCallable(query));
}
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item)
throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scopus");
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new FindByQueryCallable(query));
}
private boolean isEID(String query) {
    Pattern pattern = Pattern.compile("2-s2\\.0-\\d+");
    Matcher match = pattern.matcher(query);
    return match.matches();
}
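// Illustrative match (hypothetical value): "2-s2.0-0000000000" is treated as a Scopus EID,
// while a free-text query such as "machine learning" is not.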
/**
* This class implements a callable to get the numbers of result
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
if (StringUtils.isNotBlank(apiKey)) {
// Execute the request.
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(query, null, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
XPathExpression<Element> xpath = XPathFactory.instance()
.compile("opensearch:totalResults", Filters.element(), null, namespaces);
Element count = xpath.evaluateFirst(root);
try {
return Integer.parseInt(count.getText());
} catch (NumberFormatException e) {
return null;
}
}
return null;
}
}
/**
* This class is a Callable implementation to get a Scopus entry using EID
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String eid;
private FindByIdCallable(String eid) {
this.eid = eid;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "EID(" + eid.replace("!", "/") + ")";
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, null, null);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* This class implements a callable to get the items based on query parameters
*/
private class FindByQueryCallable implements Callable<List<ImportRecord>> {
private String title;
private String author;
private Integer year;
private Integer start;
private Integer count;
private FindByQueryCallable(Query query) {
this.title = query.getParameterAsClass("title", String.class);
this.year = query.getParameterAsClass("year", Integer.class);
this.author = query.getParameterAsClass("author", String.class);
this.start = query.getParameterAsClass("start", Integer.class) != null ?
query.getParameterAsClass("start", Integer.class) : 0;
this.count = query.getParameterAsClass("count", Integer.class) != null ?
query.getParameterAsClass("count", Integer.class) : 20;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = "";
StringBuffer query = new StringBuffer();
if (StringUtils.isNotBlank(title)) {
query.append("title(").append(title).append(")");
}
if (StringUtils.isNotBlank(author)) {
// [FAU]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("AUTH(").append(author).append(")");
}
if (year != null && year != -1) {
// [DP]
if (query.length() > 0) {
query.append(" AND ");
}
query.append("PUBYEAR IS ").append(year);
}
queryString = query.toString();
if (apiKey != null && !apiKey.equals("")) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = query.getParameterAsClass("query", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
params.put(URI_PARAMETERS, requestParams);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
private Map<String, String> getRequestParameters(String query, String viewMode, Integer start, Integer count) {
Map<String, String> params = new HashMap<String, String>();
params.put("httpAccept", "application/xml");
params.put("apiKey", apiKey);
params.put("query", query);
if (StringUtils.isNotBlank(instKey)) {
params.put("insttoken", instKey);
}
if (StringUtils.isNotBlank(viewMode)) {
params.put("view", viewMode);
}
params.put("start", (Objects.nonNull(start) ? start + "" : "0"));
params.put("count", (Objects.nonNull(count) ? count + "" : "20"));
return params;
}
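// Illustrative parameter set (hypothetical key and query): apiKey=XXXX, query=AUTH(smith),
// httpAccept=application/xml, start=0, count=20, plus insttoken and view only when configured.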
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
List<Element> records = root.getChildren("entry",Namespace.getNamespace("http://www.w3.org/2005/Atom"));
return records;
} catch (JDOMException | IOException e) {
return new ArrayList<Element>();
}
}
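// Illustrative response shape (abridged): the Scopus Search API response is an Atom feed whose
// <entry> elements (namespace http://www.w3.org/2005/Atom) are extracted here, one per record.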
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getViewMode() {
return viewMode;
}
public void setViewMode(String viewMode) {
this.viewMode = viewMode;
}
public String getApiKey() {
return apiKey;
}
public String getInstKey() {
return instKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
public void setInstKey(String instKey) {
this.instKey = instKey;
}
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Web of Science metadatum fields on the DSpace metadatum fields
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class WOSFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieved
* metadata and metadata that will be set to the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "wosMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,329 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Web of Science.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private static final String AI_PATTERN = "^AI=(.*)";
private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$");
private int timeout = 1000;
private String url;
private String urlSearch;
private String apiKey;
@Autowired
private LiveImportClient liveImportClient;
@Override
public void init() throws Exception {}
/**
* The string that identifies this import implementation. Preferably a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "wos";
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByQueryCallable(query));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for WOS");
}
/**
* This class implements a callable to get the numbers of result
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
if (StringUtils.isNotBlank(apiKey)) {
String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8);
String url = urlSearch + queryString + "&count=1&firstRecord=1";
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
XPathExpression<Element> xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]",
Filters.element(), null);
Element tot = xpath.evaluateFirst(root);
return Integer.valueOf(tot.getValue());
}
return null;
}
}
/**
* This class is a Callable implementation to get a Web of Science entry using Doi
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String doi;
private FindByIdCallable(String doi) {
this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8);
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
if (StringUtils.isNotBlank(apiKey)) {
String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1";
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params);
List<Element> elements = splitToRecords(response);
for (Element record : elements) {
results.add(transformSourceRecords(record));
}
}
return results;
}
}
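// Illustrative request (hypothetical DOI, assuming url points at the Web of Science documents endpoint):
//   <url>10.1000%2Fxyz123?databaseId=WOS&lang=en&count=10&firstRecord=1
// The DOI is URL-encoded before being appended to the path.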
/**
* Find records matching a string query.
*
* @param query A query string to base the search on.
* @param start Offset to start at
* @param count Number of records to retrieve.
* @return A set of records. Fully transformed.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String queryString = checkQuery(query.getParameterAsClass("query", String.class));
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
if (StringUtils.isNotBlank(apiKey)) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
params.put(HEADER_PARAMETERS, getRequestParameters());
String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8)
+ "&count=" + count + "&firstRecord=" + (start + 1);
String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
List<Element> omElements = splitToRecords(response);
for (Element el : omElements) {
results.add(transformSourceRecords(el));
}
}
return results;
}
}
private Map<String, String> getRequestParameters() {
Map<String, String> params = new HashMap<String, String>();
params.put("Accept", "application/xml");
params.put("X-ApiKey", this.apiKey);
return params;
}
/**
* This method checks whether the query contains an "AI=(...)" Author Identifier,
* a DOI (wrapped as "DO=(query)") or an Accession Number (wrapped as "UT=(query)").
* Otherwise the value is placed in a TS=(query) tag,
* which searches for topic terms in the following fields within a document:
* Title, Abstract, Author keywords, Keywords Plus.
*
* @param query the raw query string
*/
private String checkQuery(String query) {
Pattern risPattern = Pattern.compile(AI_PATTERN);
Matcher risMatcher = risPattern.matcher(query.trim());
if (risMatcher.matches()) {
return query;
}
if (DoiCheck.isDoi(query)) {
// FIXME: workaround to be removed once the double posting of the query param is fixed by the community
if (query.startsWith(",")) {
query = query.substring(1);
}
return "DO=(" + query + ")";
} else if (isIsi(query)) {
return "UT=(" + query + ")";
}
StringBuilder queryBuilder = new StringBuilder("TS=(");
queryBuilder.append(query).append(")");
return queryBuilder.toString();
}
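// Illustrative transformations (hypothetical values; DOI recognition depends on DoiCheck):
//   "AI=(A-1234-2008)"    -> returned unchanged
//   "10.1000/xyz123"      -> "DO=(10.1000/xyz123)"
//   "WOS:000000000000001" -> "UT=(WOS:000000000000001)"
//   "machine learning"    -> "TS=(machine learning)"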
private boolean isIsi(String query) {
if (query.startsWith("WOS:")) {
return true;
}
Matcher matcher = ISI_PATTERN.matcher(query.trim());
return matcher.matches();
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]",
Filters.element(), null).evaluate(root).get(0).getValue().trim();
Document intDocument = saxBuilder.build(new StringReader(cData));
XPathExpression<Element> xPath = XPathFactory.instance().compile("*", Filters.element(), null);
List<Element> records = xPath.evaluate(intDocument.getRootElement());
if (CollectionUtils.isNotEmpty(records)) {
return records;
}
} catch (JDOMException | IOException e) {
log.error(e.getMessage(), e);
return new ArrayList<Element>();
}
return new ArrayList<Element>();
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
public String getApiKey() {
return apiKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
}

View File

@@ -21,6 +21,7 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.RelationshipMetadataValue;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.ItemService;
@@ -55,14 +56,24 @@ public abstract class AbstractVersionProvider {
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
String unqualifiedMetadataField = metadataSchema.getName() + "." + metadataField.getElement();
if (getIgnoredMetadataFields().contains(metadataField.toString('.')) ||
getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY)) {
//Skip this metadata field
getIgnoredMetadataFields().contains(unqualifiedMetadataField + "." + Item.ANY) ||
aMd instanceof RelationshipMetadataValue) {
//Skip this metadata field (ignored and/or virtual)
continue;
}
itemService
.addMetadata(context, itemNew, metadataField, aMd.getLanguage(), aMd.getValue(), aMd.getAuthority(),
aMd.getConfidence());
itemService.addMetadata(
context,
itemNew,
metadataField.getMetadataSchema().getName(),
metadataField.getElement(),
metadataField.getQualifier(),
aMd.getLanguage(),
aMd.getValue(),
aMd.getAuthority(),
aMd.getConfidence(),
aMd.getPlace()
);
}
}

View File

@@ -15,7 +15,9 @@ import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.identifier.IdentifierException;
@@ -44,6 +46,8 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
protected VersioningService versioningService;
@Autowired(required = true)
protected IdentifierService identifierService;
@Autowired(required = true)
protected RelationshipService relationshipService;
@Override
public Item createNewItemAndAddItInWorkspace(Context context, Item nativeItem) {
@@ -89,10 +93,18 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
}
}
/**
* Copy all data (minus a few exceptions) from the old item to the new item.
* @param c the DSpace context.
* @param itemNew the new version of the item.
* @param previousItem the old version of the item.
* @return the new version of the item, with data from the old item.
*/
@Override
public Item updateItemState(Context c, Item itemNew, Item previousItem) {
try {
copyMetadata(c, itemNew, previousItem);
copyRelationships(c, itemNew, previousItem);
createBundlesAndAddBitstreams(c, itemNew, previousItem);
try {
identifierService.reserve(c, itemNew);
@@ -114,4 +126,49 @@ public class DefaultItemVersionProvider extends AbstractVersionProvider implemen
throw new RuntimeException(e.getMessage(), e);
}
}
/**
* Copy all relationships of the old item to the new item.
* At this point in the lifecycle of the item-version (before archival), only the opposite item receives
* "latest" status. On item archival of the item-version, the "latest" status of the relevant relationships
* will be updated.
* @param context the DSpace context.
* @param newItem the new version of the item.
* @param oldItem the old version of the item.
*/
protected void copyRelationships(
Context context, Item newItem, Item oldItem
) throws SQLException, AuthorizeException {
List<Relationship> oldRelationships = relationshipService.findByItem(context, oldItem, -1, -1, false, true);
for (Relationship oldRelationship : oldRelationships) {
if (oldRelationship.getLeftItem().equals(oldItem)) {
// current item is on left side of this relationship
relationshipService.create(
context,
newItem, // new item
oldRelationship.getRightItem(),
oldRelationship.getRelationshipType(),
oldRelationship.getLeftPlace(),
oldRelationship.getRightPlace(),
oldRelationship.getLeftwardValue(),
oldRelationship.getRightwardValue(),
Relationship.LatestVersionStatus.RIGHT_ONLY // only mark the opposite side as "latest" for now
);
} else if (oldRelationship.getRightItem().equals(oldItem)) {
// current item is on right side of this relationship
relationshipService.create(
context,
oldRelationship.getLeftItem(),
newItem, // new item
oldRelationship.getRelationshipType(),
oldRelationship.getLeftPlace(),
oldRelationship.getRightPlace(),
oldRelationship.getLeftwardValue(),
oldRelationship.getRightwardValue(),
Relationship.LatestVersionStatus.LEFT_ONLY // only mark the opposite side as "latest" for now
);
}
}
}
}

View File

@@ -22,5 +22,12 @@ public interface ItemVersionProvider {
public void deleteVersionedItem(Context c, Version versionToDelete, VersionHistory history) throws SQLException;
/**
* Copy all data (minus a few exceptions) from the old item to the new item.
* @param c the DSpace context.
* @param itemNew the new version of the item.
* @param previousItem the old version of the item.
* @return the new version of the item, with data from the old item.
*/
public Item updateItemState(Context c, Item itemNew, Item previousItem);
}

View File

@@ -7,39 +7,66 @@
*/
package org.dspace.versioning;
import java.util.HashSet;
import java.util.Set;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.EntityType;
import org.dspace.content.Item;
import org.dspace.content.Relationship;
import org.dspace.content.RelationshipType;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.EntityTypeService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.RelationshipService;
import org.dspace.content.service.RelationshipTypeService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.discovery.IndexEventConsumer;
import org.dspace.event.Consumer;
import org.dspace.event.Event;
import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog;
/**
* When a new version of an item is published, unarchive the previous version and
* update {@link Relationship#latestVersionStatus} of the relevant relationships.
*
* @author Fabio Bolognesi (fabio at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
*/
public class VersioningConsumer implements Consumer {
private static Set<Item> itemsToProcess;
private static final Logger log = LogManager.getLogger(VersioningConsumer.class);
private Set<Item> itemsToProcess;
private VersionHistoryService versionHistoryService;
private VersioningService versioningService;
private ItemService itemService;
private EntityTypeService entityTypeService;
private RelationshipTypeService relationshipTypeService;
private RelationshipService relationshipService;
private RelationshipVersioningUtils relationshipVersioningUtils;
@Override
public void initialize() throws Exception {
versionHistoryService = VersionServiceFactory.getInstance().getVersionHistoryService();
versioningService = VersionServiceFactory.getInstance().getVersionService();
itemService = ContentServiceFactory.getInstance().getItemService();
entityTypeService = ContentServiceFactory.getInstance().getEntityTypeService();
relationshipTypeService = ContentServiceFactory.getInstance().getRelationshipTypeService();
relationshipService = ContentServiceFactory.getInstance().getRelationshipService();
relationshipVersioningUtils = VersionServiceFactory.getInstance().getRelationshipVersioningUtils();
}
@Override
@@ -49,37 +76,399 @@ public class VersioningConsumer implements Consumer {
@Override
public void consume(Context ctx, Event event) throws Exception {
if (itemsToProcess == null) {
itemsToProcess = new HashSet<Item>();
itemsToProcess = new HashSet<>();
}
int st = event.getSubjectType();
int et = event.getEventType();
// only items
if (event.getSubjectType() != Constants.ITEM) {
return;
}
if (st == Constants.ITEM && et == Event.INSTALL) {
Item item = (Item) event.getSubject(ctx);
if (item != null && item.isArchived()) {
VersionHistory history = versionHistoryService.findByItem(ctx, item);
if (history != null) {
Version latest = versionHistoryService.getLatestVersion(ctx, history);
Version previous = versionHistoryService.getPrevious(ctx, history, latest);
if (previous != null) {
Item previousItem = previous.getItem();
if (previousItem != null) {
previousItem.setArchived(false);
itemsToProcess.add(previousItem);
//Fire a new modify event for our previous item
//Due to the need to reindex the item in the search
//and browse index we need to fire a new event
ctx.addEvent(new Event(Event.MODIFY,
previousItem.getType(), previousItem.getID(),
null, itemService.getIdentifiers(ctx, previousItem)));
}
}
// only install events
if (event.getEventType() != Event.INSTALL) {
return;
}
// get the item (should be archived)
Item item = (Item) event.getSubject(ctx);
if (item == null || !item.isArchived()) {
return;
}
// get version history
VersionHistory history = versionHistoryService.findByItem(ctx, item);
if (history == null) {
return;
}
// get latest version
Version latestVersion = versionHistoryService.getLatestVersion(ctx, history);
if (latestVersion == null) {
return;
}
// get previous version
Version previousVersion = versionHistoryService.getPrevious(ctx, history, latestVersion);
if (previousVersion == null) {
return;
}
// get latest item
Item latestItem = latestVersion.getItem();
if (latestItem == null) {
String msg = String.format(
"Illegal state: Obtained version history of item with uuid %s, handle %s, but the latest item is null",
item.getID(), item.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
// get previous item
Item previousItem = previousVersion.getItem();
if (previousItem == null) {
return;
}
// unarchive previous item
unarchiveItem(ctx, previousItem);
// update relationships
updateRelationships(ctx, latestItem, previousItem);
}
protected void unarchiveItem(Context ctx, Item item) {
item.setArchived(false);
itemsToProcess.add(item);
//Fire a new modify event for our previous item
//Due to the need to reindex the item in the search
//and browse index we need to fire a new event
ctx.addEvent(new Event(
Event.MODIFY, item.getType(), item.getID(), null, itemService.getIdentifiers(ctx, item)
));
}
/**
* Update {@link Relationship#latestVersionStatus} of the relationships of both the old version and the new version
* of the item.
*
* This method will first locate all relationships that are eligible for an update,
* then it will try to match each of those relationships on the old version of given item
* with a relationship on the new version.
*
* One of the following scenarios will happen:
* - if a match is found, then the "latest" status on the side of given item is transferred from
* the old relationship to the new relationship. This implies that on the page of the third-party item,
* the old version of given item will NOT be shown anymore and the new version of given item will appear.
* Both versions of the given item still show the third-party item on their pages.
* - if a relationship only exists on the new version of given item, then this method does nothing.
* The status of those relationships should already have been set to "latest" on both sides during relationship
* creation.
* - if a relationship only exists on the old version of given item, then we assume that the relationship is no
* longer relevant to / has been removed from the new version of the item. The "latest" status is removed from
* the side of the given item. This implies that on the page of the third-party item,
* the relationship with given item will no longer be listed. The old version of given item still lists
* the third-party item and the new version doesn't.
* @param ctx the DSpace context.
* @param latestItem the new version of the item.
* @param previousItem the old version of the item.
*/
protected void updateRelationships(Context ctx, Item latestItem, Item previousItem) {
// check that the entity types of both items match
if (!doEntityTypesMatch(latestItem, previousItem)) {
return;
}
// get the entity type (same for both items)
EntityType entityType = getEntityType(ctx, latestItem);
if (entityType == null) {
return;
}
// get all relationship types that are linked to the given entity type
List<RelationshipType> relationshipTypes = getRelationshipTypes(ctx, entityType);
if (CollectionUtils.isEmpty(relationshipTypes)) {
return;
}
for (RelationshipType relationshipType : relationshipTypes) {
List<Relationship> latestItemRelationships = getAllRelationships(ctx, latestItem, relationshipType);
if (latestItemRelationships == null) {
continue;
}
List<Relationship> previousItemRelationships = getAllRelationships(ctx, previousItem, relationshipType);
if (previousItemRelationships == null) {
continue;
}
// NOTE: no need to loop through latestItemRelationships, because if no match can be found
// (meaning a relationship is only present on the new version of the item), then it's
// a newly added relationship and its status should have been set to BOTH during creation.
for (Relationship previousItemRelationship : previousItemRelationships) {
// determine on which side of the relationship the latest and previous item should be
boolean isLeft = previousItem.equals(previousItemRelationship.getLeftItem());
boolean isRight = previousItem.equals(previousItemRelationship.getRightItem());
if (isLeft == isRight) {
Item leftItem = previousItemRelationship.getLeftItem();
Item rightItem = previousItemRelationship.getRightItem();
String msg = String.format(
"Illegal state: could not determine side of item with uuid %s, handle %s in " +
"relationship with id %s, rightward name %s between " +
"left item with uuid %s, handle %s and right item with uuid %s, handle %s",
previousItem.getID(), previousItem.getHandle(), previousItemRelationship.getID(),
previousItemRelationship.getRelationshipType().getRightwardType(),
leftItem.getID(), leftItem.getHandle(), rightItem.getID(), rightItem.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
// get the matching relationship on the latest item
Relationship latestItemRelationship =
getMatchingRelationship(latestItem, isLeft, previousItemRelationship, latestItemRelationships);
// the other side of the relationship should be "latest", otherwise the relationship could not have been
// copied to the new item in the first place (by DefaultVersionProvider#copyRelationships)
if (relationshipVersioningUtils.otherSideIsLatest(
isLeft, previousItemRelationship.getLatestVersionStatus()
)) {
// Set the previous version of the item to non-latest. This implies that the previous version
// of the item will not be shown anymore on the page of the third-party item. That makes sense,
// because either the relationship has been deleted from the new version of the item (no match),
// or the matching relationship (linked to new version) will receive "latest" status in
// the next step.
LatestVersionStatusChangelog changelog =
relationshipVersioningUtils.updateLatestVersionStatus(previousItemRelationship, isLeft, false);
reindexRelationship(ctx, changelog, previousItemRelationship);
}
if (latestItemRelationship != null) {
// Set the new version of the item to latest if the relevant relationship exists (match found).
// This implies that the new version of the item will appear on the page of the third-party item.
// The old version of the item will not appear anymore on the page of the third-party item,
// see previous step.
LatestVersionStatusChangelog changelog =
relationshipVersioningUtils.updateLatestVersionStatus(latestItemRelationship, isLeft, true);
reindexRelationship(ctx, changelog, latestItemRelationship);
}
}
}
}
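// Illustrative scenario (hypothetical): if version 1 of a Publication is related to a Person and that
// relationship was copied to version 2, the old relationship loses "latest" status on the Publication side
// and the matching new relationship gains it, so the Person page now lists version 2. If the relationship
// was removed from version 2, only the old relationship is demoted and the Person page no longer lists it.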
/**
* If the {@link Relationship#latestVersionStatus} of the relationship has changed,
* an "item modified" event should be fired for both the left and right item of the relationship.
* On one item the relation.* fields will change. On the other item the relation.*.latestForDiscovery will change.
* The event will cause the items to be re-indexed by the {@link IndexEventConsumer}.
* @param ctx the DSpace context.
* @param changelog indicates which side of the relationship has changed.
* @param relationship the relationship.
*/
protected void reindexRelationship(
Context ctx, LatestVersionStatusChangelog changelog, Relationship relationship
) {
if (changelog == NO_CHANGES) {
return;
}
// on one item, relation.* fields will change
// on the other item, relation.*.latestForDiscovery will change
// reindex left item
Item leftItem = relationship.getLeftItem();
itemsToProcess.add(leftItem);
ctx.addEvent(new Event(
Event.MODIFY, leftItem.getType(), leftItem.getID(), null, itemService.getIdentifiers(ctx, leftItem)
));
// reindex right item
Item rightItem = relationship.getRightItem();
itemsToProcess.add(rightItem);
ctx.addEvent(new Event(
Event.MODIFY, rightItem.getType(), rightItem.getID(), null, itemService.getIdentifiers(ctx, rightItem)
));
}
/**
* Given two items, check if their entity types match.
* If one or both items don't have an entity type, comparing is pointless and this method will return false.
* @param latestItem the item that represents the most recent version.
* @param previousItem the item that represents the second-most recent version.
* @return true if the entity types of both items are non-null and equal, false otherwise.
*/
protected boolean doEntityTypesMatch(Item latestItem, Item previousItem) {
String latestItemEntityType = itemService.getEntityTypeLabel(latestItem);
String previousItemEntityType = itemService.getEntityTypeLabel(previousItem);
// check if both items have an entity type
if (latestItemEntityType == null || previousItemEntityType == null) {
if (previousItemEntityType != null) {
log.warn(
"Inconsistency: Item with uuid {}, handle {} has NO entity type, " +
"but the previous version of that item with uuid {}, handle {} has entity type {}",
latestItem.getID(), latestItem.getHandle(),
previousItem.getID(), previousItem.getHandle(), previousItemEntityType
);
}
// one or both items do not have an entity type, so comparing is pointless
return false;
}
// check if the entity types are equal
if (!StringUtils.equals(latestItemEntityType, previousItemEntityType)) {
log.warn(
"Inconsistency: Item with uuid {}, handle {} has entity type {}, " +
"but the previous version of that item with uuid {}, handle {} has entity type {}",
latestItem.getID(), latestItem.getHandle(), latestItemEntityType,
previousItem.getID(), previousItem.getHandle(), previousItemEntityType
);
return false;
}
// success - the entity types of both items are non-null and equal
log.info(
"Item with uuid {}, handle {} and the previous version of that item with uuid {}, handle {} " +
"have the same entity type: {}",
latestItem.getID(), latestItem.getHandle(), previousItem.getID(), previousItem.getHandle(),
latestItemEntityType
);
return true;
}
/**
* Get the entity type (stored in metadata field dspace.entity.type) of any item.
* @param ctx the DSpace context.
* @param item the item.
* @return the entity type.
*/
protected EntityType getEntityType(Context ctx, Item item) {
try {
return itemService.getEntityType(ctx, item);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain entity type with label {} of item with uuid {}, handle {}",
itemService.getEntityTypeLabel(item), item.getID(), item.getHandle(), e
);
return null;
}
}
/**
* Get all relationship types that have the given entity type on their left and/or right side.
* @param ctx the DSpace context.
* @param entityType the entity type for which all relationship types should be found.
* @return a list of relationship types (possibly empty), or null in case of error.
*/
protected List<RelationshipType> getRelationshipTypes(Context ctx, EntityType entityType) {
try {
return relationshipTypeService.findByEntityType(ctx, entityType);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain relationship types via entity type with id {}, label {}",
entityType.getID(), entityType.getLabel(), e
);
return null;
}
}
/**
* Get all relationships of the given type linked to the given item.
* @param ctx the DSpace context.
* @param item the item.
* @param relationshipType the relationship type.
* @return a list of relationships (possibly empty), or null in case of error.
*/
protected List<Relationship> getAllRelationships(Context ctx, Item item, RelationshipType relationshipType) {
try {
return relationshipService.findByItemAndRelationshipType(ctx, item, relationshipType, -1, -1, false);
} catch (SQLException e) {
log.error(
"Exception occurred when trying to obtain relationships of type with id {}, rightward name {} " +
"for item with uuid {}, handle {}",
relationshipType.getID(), relationshipType.getRightwardType(), item.getID(), item.getHandle(), e
);
return null;
}
}
/**
* From a list of relationships, find the relationship with the correct relationship type and items.
* If isLeft is true, the provided item should be on the left side of the relationship.
* If isLeft is false, the provided item should be on the right side of the relationship.
* In both cases, the other item is taken from the given relationship.
* @param latestItem the item that should either be on the left or right side of the returned relationship (if any).
* @param isLeft decide on which side of the relationship the provided item should be.
* @param previousItemRelationship the relationship from which the type and the other item are read.
* @param relationships the list of relationships that we'll search through.
* @return the relationship that satisfies the requirements (can only be one or zero).
*/
protected Relationship getMatchingRelationship(
Item latestItem, boolean isLeft, Relationship previousItemRelationship, List<Relationship> relationships
) {
Item leftItem = previousItemRelationship.getLeftItem();
RelationshipType relationshipType = previousItemRelationship.getRelationshipType();
Item rightItem = previousItemRelationship.getRightItem();
if (isLeft) {
return getMatchingRelationship(latestItem, relationshipType, rightItem, relationships);
} else {
return getMatchingRelationship(leftItem, relationshipType, latestItem, relationships);
}
}
/**
* Find the relationship with the given left item, relation type and right item, from a list of relationships.
* @param expectedLeftItem the relationship that we're looking for has this item on the left side.
* @param expectedRelationshipType the relationship that we're looking for has this relationship type.
* @param expectedRightItem the relationship that we're looking for has this item on the right side.
* @param relationships the list of relationships that we'll search through.
* @return the relationship that satisfies the requirements (can only be one or zero).
*/
protected Relationship getMatchingRelationship(
Item expectedLeftItem, RelationshipType expectedRelationshipType, Item expectedRightItem,
List<Relationship> relationships
) {
Integer expectedRelationshipTypeId = expectedRelationshipType.getID();
List<Relationship> matchingRelationships = relationships.stream()
.filter(relationship -> {
int relationshipTypeId = relationship.getRelationshipType().getID();
boolean leftItemMatches = expectedLeftItem.equals(relationship.getLeftItem());
boolean relationshipTypeMatches = expectedRelationshipTypeId == relationshipTypeId;
boolean rightItemMatches = expectedRightItem.equals(relationship.getRightItem());
return leftItemMatches && relationshipTypeMatches && rightItemMatches;
})
.distinct()
.collect(Collectors.toUnmodifiableList());
if (matchingRelationships.isEmpty()) {
return null;
}
// NOTE: this situation should never occur because the relationship table has a unique constraint
// over the "left_id", "type_id" and "right_id" columns
if (matchingRelationships.size() > 1) {
String msg = String.format(
"Illegal state: expected 0 or 1 relationship, but found %s relationships (ids: %s) " +
"of type with id %s, rightward name %s " +
"between left item with uuid %s, handle %s and right item with uuid %s, handle %s",
matchingRelationships.size(),
matchingRelationships.stream().map(Relationship::getID).collect(Collectors.toUnmodifiableList()),
expectedRelationshipTypeId, expectedRelationshipType.getRightwardType(),
expectedLeftItem.getID(), expectedLeftItem.getHandle(),
expectedRightItem.getID(), expectedRightItem.getHandle()
);
log.error(msg);
throw new IllegalStateException(msg);
}
return matchingRelationships.get(0);
}
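// Illustrative sketch (not part of this class): how the helpers above might be chained when
// relinking a newly created item version; `ctx`, `latestItem`, `isLeft` and
// `previousItemRelationship` are assumed inputs.
//
//     List<Relationship> candidates =
//         getAllRelationships(ctx, latestItem, previousItemRelationship.getRelationshipType());
//     if (candidates != null) {
//         Relationship match =
//             getMatchingRelationship(latestItem, isLeft, previousItemRelationship, candidates);
//         // null means the latest item is not linked yet; otherwise exactly one relationship matches
//     }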
@Override
public void end(Context ctx) throws Exception {
if (itemsToProcess != null) {

View File

@@ -10,6 +10,7 @@ package org.dspace.versioning.factory;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
/**
* Abstract factory to get services for the versioning package, use VersionServiceFactory.getInstance() to retrieve
@@ -23,6 +24,8 @@ public abstract class VersionServiceFactory {
public abstract VersioningService getVersionService();
public abstract RelationshipVersioningUtils getRelationshipVersioningUtils();
public static VersionServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("versionServiceFactory", VersionServiceFactory.class);

View File

@@ -9,6 +9,7 @@ package org.dspace.versioning.factory;
import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService;
import org.dspace.versioning.utils.RelationshipVersioningUtils;
import org.springframework.beans.factory.annotation.Autowired;
/**
@@ -25,6 +26,9 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory {
@Autowired(required = true)
protected VersioningService versionService;
@Autowired(required = true)
protected RelationshipVersioningUtils relationshipVersioningUtils;
@Override
public VersionHistoryService getVersionHistoryService() {
return versionHistoryService;
@@ -34,4 +38,10 @@ public class VersionServiceFactoryImpl extends VersionServiceFactory {
public VersioningService getVersionService() {
return versionService;
}
@Override
public RelationshipVersioningUtils getRelationshipVersioningUtils() {
return relationshipVersioningUtils;
}
}

View File

@@ -0,0 +1,114 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.versioning.utils;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.LEFT_SIDE_CHANGED;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.NO_CHANGES;
import static org.dspace.versioning.utils.RelationshipVersioningUtils.LatestVersionStatusChangelog.RIGHT_SIDE_CHANGED;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Relationship;
import org.dspace.content.Relationship.LatestVersionStatus;
/**
* Class with utility methods to manipulate relationships that are linked to versioned items.
* Specifically focused on the "latest version status" of relationships,
* which controls which related items are relevant (visible) to any given item.
*/
public class RelationshipVersioningUtils {
private static final Logger log = LogManager.getLogger(RelationshipVersioningUtils.class);
/**
* Given a latest version status, check whether the other side of the relationship is "latest".
* When looking from the left side, BOTH and RIGHT_ONLY yield true.
* When looking from the right side, BOTH and LEFT_ONLY yield true.
* @param isLeft whether we should look from the left or right side.
* @param latestVersionStatus the latest version status.
* @return true if the other side has "latest" status, false otherwise.
*/
public boolean otherSideIsLatest(boolean isLeft, LatestVersionStatus latestVersionStatus) {
if (latestVersionStatus == LatestVersionStatus.BOTH) {
return true;
}
return latestVersionStatus == (isLeft ? LatestVersionStatus.RIGHT_ONLY : LatestVersionStatus.LEFT_ONLY);
}
public enum LatestVersionStatusChangelog {
NO_CHANGES,
LEFT_SIDE_CHANGED,
RIGHT_SIDE_CHANGED
}
/**
* Update {@link Relationship#latestVersionStatus} of the given relationship.
* If isLatest = true, this method will never throw IllegalStateException.
* If isLatest = false, you should make sure that the selected side of the given relationship
* currently has "latest" status, otherwise an IllegalStateException will be thrown.
* @param relationship the relationship.
* @param updateLeftSide whether the status of the left item or the right item should be updated.
* @param isLatest the value to which the status of the selected side should be set.
* @return a changelog value indicating which side of the latest version status (if any) was changed.
* @throws IllegalStateException if the operation would result in both the left side and the right side
*                               being set to non-latest.
*/
public LatestVersionStatusChangelog updateLatestVersionStatus(
Relationship relationship, boolean updateLeftSide, boolean isLatest
) throws IllegalStateException {
LatestVersionStatus lvs = relationship.getLatestVersionStatus();
boolean leftSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.LEFT_ONLY;
boolean rightSideIsLatest = lvs == LatestVersionStatus.BOTH || lvs == LatestVersionStatus.RIGHT_ONLY;
if (updateLeftSide) {
if (leftSideIsLatest == isLatest) {
return NO_CHANGES; // no change needed
}
leftSideIsLatest = isLatest;
} else {
if (rightSideIsLatest == isLatest) {
return NO_CHANGES; // no change needed
}
rightSideIsLatest = isLatest;
}
LatestVersionStatus newVersionStatus;
if (leftSideIsLatest && rightSideIsLatest) {
newVersionStatus = LatestVersionStatus.BOTH;
} else if (leftSideIsLatest) {
newVersionStatus = LatestVersionStatus.LEFT_ONLY;
} else if (rightSideIsLatest) {
newVersionStatus = LatestVersionStatus.RIGHT_ONLY;
} else {
String msg = String.format(
"Illegal state: cannot set %s item to latest = false, because relationship with id %s, " +
"rightward name %s between left item with uuid %s, handle %s and right item with uuid %s, handle %s " +
"has latest version status set to %s",
updateLeftSide ? "left" : "right", relationship.getID(),
relationship.getRelationshipType().getRightwardType(),
relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(),
relationship.getRightItem().getID(), relationship.getRightItem().getHandle(), lvs
);
log.error(msg);
throw new IllegalStateException(msg);
}
log.info(
"set latest version status from {} to {} for relationship with id {}, rightward name {} " +
"between left item with uuid {}, handle {} and right item with uuid {}, handle {}",
lvs, newVersionStatus, relationship.getID(), relationship.getRelationshipType().getRightwardType(),
relationship.getLeftItem().getID(), relationship.getLeftItem().getHandle(),
relationship.getRightItem().getID(), relationship.getRightItem().getHandle()
);
relationship.setLatestVersionStatus(newVersionStatus);
return updateLeftSide ? LEFT_SIDE_CHANGED : RIGHT_SIDE_CHANGED;
}
}
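A minimal usage sketch for the class above, assuming an existing `relationship` whose right item has just become the latest version. The factory lookup comes from the VersionServiceFactory changes earlier in this diff; the variable names are illustrative only.
RelationshipVersioningUtils utils =
    VersionServiceFactory.getInstance().getRelationshipVersioningUtils();
// Mark the right side of this relationship as "latest".
RelationshipVersioningUtils.LatestVersionStatusChangelog changelog =
    utils.updateLatestVersionStatus(relationship, false, true);
// Seen from the left item, the related (right) item now has "latest" status,
// i.e. it is considered relevant (visible) to the left item.
boolean visibleFromLeft =
    utils.otherSideIsLatest(true, relationship.getLatestVersionStatus());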

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL;
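The NOTE above relies on ordinal-based persistence of the enum; a minimal sketch of that assumption, using the LatestVersionStatus enum from the Relationship class shown earlier in this diff.
// Hedged sketch: the column stores the enum ordinal, so DEFAULT 0 must map to the first constant.
Relationship.LatestVersionStatus defaultStatus = Relationship.LatestVersionStatus.values()[0];
assert defaultStatus == Relationship.LatestVersionStatus.BOTH; // "both" sides are latest
assert defaultStatus.ordinal() == 0;                           // matches DEFAULT 0 in the migration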

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------
CREATE SEQUENCE orcid_token_id_seq;
CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
eperson_id UUID NOT NULL UNIQUE,
profile_item_id UUID,
access_token VARCHAR(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);
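A hedged JDBC sketch of what this schema enforces: at most one token row per eperson (UNIQUE on eperson_id), an optional profile item reference, and a token of at most 100 characters. `conn` is an open java.sql.Connection and the UUID/token variables are placeholders; the nextval() call assumes the PostgreSQL dialect.
try (PreparedStatement ps = conn.prepareStatement(
        "INSERT INTO orcid_token (id, eperson_id, profile_item_id, access_token) "
            + "VALUES (nextval('orcid_token_id_seq'), ?, ?, ?)")) {
    ps.setObject(1, ePersonUuid);     // FK to eperson(uuid); UNIQUE => one token per eperson
    ps.setObject(2, profileItemUuid); // nullable FK to item(uuid)
    ps.setString(3, accessToken);     // VARCHAR(100)
    ps.executeUpdate();               // a second insert for the same eperson_id would fail
}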

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------
CREATE SEQUENCE orcid_token_id_seq;
CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
eperson_id RAW(16) NOT NULL UNIQUE,
profile_item_id RAW(16),
access_token VARCHAR2(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);

View File

@@ -0,0 +1,10 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class)
ALTER TABLE relationship ADD COLUMN IF NOT EXISTS latest_version_status INTEGER DEFAULT 0 NOT NULL;

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------
CREATE SEQUENCE orcid_token_id_seq;
CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
eperson_id uuid NOT NULL UNIQUE,
profile_item_id uuid,
access_token VARCHAR(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);

View File

@@ -43,7 +43,6 @@
class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping">
</bean>
<bean id="pubmedImportService"
class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl">
<property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
@@ -57,7 +56,6 @@
</property>
</bean>
<bean id="pubmedMetadataFieldMapping"
class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping">
</bean>
@@ -121,6 +119,7 @@
<property name="url" value="${crossref.url}"/>
</bean>
<bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/>
<bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/>
<property name="consumerKey" value="${epo.consumerKey}" />
@@ -134,6 +133,15 @@
</bean>
<bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/>
<bean id="ScopusImportService" class="org.dspace.importer.external.scopus.service.ScopusImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="scopusMetadataFieldMapping"/>
<property name="url" value="${scopus.url}"/>
<property name="apiKey" value="${scopus.apiKey}"/>
<property name="instKey" value="${scopus.instToken}"/>
<property name="viewMode" value="${scopus.search-api.viewMode}"/>
</bean>
<bean id="scopusMetadataFieldMapping" class="org.dspace.importer.external.scopus.service.ScopusFieldMapping"/>
<bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton">
<!-- Set to empty to use the default set of fields -->
<constructor-arg type="java.lang.String" value=""/>
@@ -149,6 +157,27 @@
</bean>
<bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/>
<bean id="WosImportService" class="org.dspace.importer.external.wos.service.WOSImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="wosMetadataFieldMapping"/>
<property name="apiKey" value="${wos.apiKey}"/>
<property name="url" value="${wos.url}"/>
<property name="urlSearch" value="${wos.url.search}"/>
</bean>
<bean id="wosMetadataFieldMapping" class="org.dspace.importer.external.wos.service.WOSFieldMapping"/>
<bean id="PubmedEuropeImportService" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="PubmedEuropeMetadataFieldMapping"/>
<property name="url" value="${pubmedeurope.url}"/>
</bean>
<bean id="PubmedEuropeMetadataFieldMapping" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeFieldMapping"/>
<bean id="CiniiImportService" class="org.dspace.importer.external.cinii.CiniiImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="CiniiMetadataFieldMapping"/>
<property name="url" value="${cinii.url}"/>
<property name="urlSearch" value="${cinii.url.search}"/>
</bean>
<bean id="CiniiMetadataFieldMapping" class="org.dspace.importer.external.cinii.CiniiFieldMapping"/>
<bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton">
<property name="apiKey" value="${ads.key}" />
<property name="url" value="${ads.url}" />
@@ -162,4 +191,4 @@
<constructor-arg value="dc.identifier.other"/>
</bean>
</beans>
</beans>
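The ${...} placeholders in the bean definitions above are resolved from the DSpace configuration. A hedged sketch of reading one of them programmatically, assuming the standard ConfigurationService lookup; the key name is taken from the ${scopus.apiKey} placeholder above.
String scopusApiKey = DSpaceServicesFactory.getInstance()
        .getConfigurationService()
        .getProperty("scopus.apiKey"); // expected to be null when the key is not configured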

View File

@@ -145,6 +145,8 @@ useProxies = true
proxies.trusted.ipranges = 7.7.7.7
proxies.trusted.include_ui_ip = true
csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports
# For the tests we have to disable this health indicator because there is no mock Solr server available, so the calculated status would be DOWN
management.health.solrOai.enabled = false
@@ -154,4 +156,4 @@ researcher-profile.entity-type = Person
# These settings ensure "dspace.object.owner" field are indexed by Authority Control
choices.plugin.dspace.object.owner = EPersonAuthority
choices.presentation.dspace.object.owner = suggest
authority.controlled.dspace.object.owner = true
authority.controlled.dspace.object.owner = true

View File

@@ -42,10 +42,10 @@
</bean>
<bean class="org.dspace.external.provider.impl.OrcidV3AuthorDataProvider" init-method="init">
<property name="sourceIdentifier" value="orcid"/>
<property name="orcidUrl" value="${orcid.url}" />
<property name="clientId" value="${orcid.clientid}" />
<property name="clientSecret" value="${orcid.clientsecret}" />
<property name="OAUTHUrl" value="${orcid.oauth.url}" />
<property name="orcidUrl" value="${orcid.domain-url}" />
<property name="clientId" value="${orcid.application-client-id}" />
<property name="clientSecret" value="${orcid.application-client-secret}" />
<property name="OAUTHUrl" value="${orcid.token-url}" />
<property name="orcidRestConnector" ref="orcidRestConnector"/>
<property name="supportedEntityTypes">
<list>
@@ -55,7 +55,7 @@
</bean>
<bean id="orcidRestConnector" class="org.dspace.external.OrcidRestConnector">
<constructor-arg value="${orcid.api.url}"/>
<constructor-arg value="${orcid.api-url}"/>
</bean>
<bean id="pubmedLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
@@ -70,4 +70,15 @@
</property>
</bean>
<bean id="scopusLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
<property name="metadataSource" ref="ScopusImportService"/>
<property name="sourceIdentifier" value="scopus"/>
<property name="recordIdMetadata" value="dc.identifier.scopus"/>
<property name="supportedEntityTypes">
<list>
<value>Publication</value>
</list>
</property>
</bean>
</beans>
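A hedged sketch of looking up the new Scopus provider at runtime; the bean id and class come from the definition above, while the service-manager lookup style is an assumption.
LiveImportDataProvider scopusProvider = new DSpace().getServiceManager()
        .getServiceByName("scopusLiveImportDataProvider", LiveImportDataProvider.class);
// its sourceIdentifier property is "scopus" and records are keyed on dc.identifier.scopus, as configured above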

View File

@@ -22,6 +22,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/>
</bean>
<bean id="metadata-export-search" class="org.dspace.app.bulkedit.MetadataExportSearchCliScriptConfiguration">
<property name="description" value="export metadata from a discovery search" />
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportSearchCli" />
</bean>
<bean id="curate"
class="org.dspace.curate.CurationCliScriptConfiguration">
<property name="description"
@@ -45,6 +50,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/>
</bean>
<bean id="solr-database-resync" class="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCliScriptConfiguration">
<property name="description" value="Update the database status of Items in solr"/>
<property name="dspaceRunnableClass" value="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli"/>
</bean>
<bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

Some files were not shown because too many files have changed in this diff