Merge branch 'main' into feature-relationship-versioning-contribution

This commit is contained in:
Bruno Roemers
2022-06-17 10:13:21 +02:00
158 changed files with 13150 additions and 512 deletions

View File

@@ -550,10 +550,17 @@
<groupId>com.ibm.icu</groupId> <groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId> <artifactId>icu4j</artifactId>
</dependency> </dependency>
<!-- Codebase at https://github.com/OCLC-Research/oaiharvester2/ -->
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>oclc-harvester2</artifactId> <artifactId>oclc-harvester2</artifactId>
</dependency> </dependency>
<!-- Xalan is REQUIRED by 'oclc-harvester2' listed above (OAI harvesting fails without it).
Please do NOT use Xalan in DSpace codebase as it is not well maintained. -->
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>

View File

@@ -0,0 +1,170 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.ParseException;
import org.dspace.content.Item;
import org.dspace.content.MetadataDSpaceCsvExportServiceImpl;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.MetadataDSpaceCsvExportService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableCommunity;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.sort.SortOption;
import org.dspace.utils.DSpace;
/**
 * Metadata exporter allowing the batch export of metadata resulting from a
 * discovery search into a CSV file.
 */
public class MetadataExportSearch extends DSpaceRunnable<MetadataExportSearchScriptConfiguration> {

    private static final String EXPORT_CSV = "exportCSV";

    private boolean help = false;
    // UUID (as a string) of the community or collection used as search scope, if any
    private String identifier;
    private String discoveryConfigName;
    // Raw -f option values, each with syntax <field>,<operator>=<value>
    private String[] filterQueryStrings;
    private boolean hasScope = false;
    private String query;

    private SearchService searchService;
    private MetadataDSpaceCsvExportService metadataDSpaceCsvExportService;
    private EPersonService ePersonService;
    private DiscoveryConfigurationService discoveryConfigurationService;
    private CommunityService communityService;
    private CollectionService collectionService;
    private DiscoverQueryBuilder queryBuilder;

    @Override
    public MetadataExportSearchScriptConfiguration getScriptConfiguration() {
        return new DSpace().getServiceManager()
                           .getServiceByName("metadata-export-search", MetadataExportSearchScriptConfiguration.class);
    }

    /**
     * Resolves the required services and parses the command line options.
     *
     * @throws ParseException if the command line cannot be parsed
     */
    @Override
    public void setup() throws ParseException {
        searchService = SearchUtils.getSearchService();
        metadataDSpaceCsvExportService = new DSpace().getServiceManager()
            .getServiceByName(
                MetadataDSpaceCsvExportServiceImpl.class.getCanonicalName(),
                MetadataDSpaceCsvExportService.class
            );
        ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
        discoveryConfigurationService = SearchUtils.getConfigurationService();
        communityService = ContentServiceFactory.getInstance().getCommunityService();
        collectionService = ContentServiceFactory.getInstance().getCollectionService();
        queryBuilder = SearchUtils.getQueryBuilder();

        if (commandLine.hasOption('h')) {
            help = true;
            return;
        }
        if (commandLine.hasOption('q')) {
            query = commandLine.getOptionValue('q');
        }
        if (commandLine.hasOption('s')) {
            hasScope = true;
            identifier = commandLine.getOptionValue('s');
        }
        if (commandLine.hasOption('c')) {
            discoveryConfigName = commandLine.getOptionValue('c');
        }
        if (commandLine.hasOption('f')) {
            filterQueryStrings = commandLine.getOptionValues('f');
        }
    }

    /**
     * Runs the discovery search and streams the resulting items' metadata to a
     * CSV file through the handler.
     */
    @Override
    public void internalRun() throws Exception {
        if (help) {
            loghelpinfo();
            printHelp();
            return;
        }
        handler.logDebug("starting search export");

        IndexableObject dso = null;
        Context context = new Context();
        context.setCurrentUser(ePersonService.find(context, this.getEpersonIdentifier()));
        if (hasScope) {
            dso = resolveScope(context, identifier);
        }
        DiscoveryConfiguration discoveryConfiguration =
            discoveryConfigurationService.getDiscoveryConfiguration(discoveryConfigName);

        List<QueryBuilderSearchFilter> queryBuilderSearchFilters = new ArrayList<>();
        handler.logDebug("processing filter queries");
        if (filterQueryStrings != null) {
            for (String filterQueryString : filterQueryStrings) {
                // Each filter has the syntax <field>,<operator>=<value>
                String field = filterQueryString.split(",", 2)[0];
                String operator = filterQueryString.split("(,|=)", 3)[1];
                String value = filterQueryString.split("=", 2)[1];
                QueryBuilderSearchFilter queryBuilderSearchFilter =
                    new QueryBuilderSearchFilter(field, operator, value);
                queryBuilderSearchFilters.add(queryBuilderSearchFilter);
            }
        }

        handler.logDebug("building query");
        // Offset is 0: the iterator below pages through the full result set.
        // NOTE: the previous code passed Long.getLong("0"), which looks up a
        // *system property* named "0" and therefore always returned null.
        DiscoverQuery discoverQuery =
            queryBuilder.buildQuery(context, dso, discoveryConfiguration, query, queryBuilderSearchFilters,
                "Item", 10, 0L, null, SortOption.DESCENDING);
        handler.logDebug("creating iterator");

        Iterator<Item> itemIterator = searchService.iteratorSearch(context, dso, discoverQuery);
        handler.logDebug("creating dspacecsv");
        DSpaceCSV dSpaceCSV = metadataDSpaceCsvExportService.export(context, itemIterator, true);
        handler.logDebug("writing to file " + getFileNameOrExportFile());
        handler.writeFilestream(context, getFileNameOrExportFile(), dSpaceCSV.getInputStream(), EXPORT_CSV);
        context.restoreAuthSystemState();
        context.complete();
    }

    protected void loghelpinfo() {
        handler.logInfo("metadata-export");
    }

    /** @return the name of the file the export is written to */
    protected String getFileNameOrExportFile() {
        return "metadataExportSearch.csv";
    }

    /**
     * Resolves the given id to an indexable community or collection scope.
     *
     * @param  context the DSpace context
     * @param  id      the UUID (as string) of the community or collection
     * @return         the indexable scope object; its indexed object is null
     *                 when the uuid matches neither a community nor a collection
     * @throws SQLException if a database error occurs during the lookup
     */
    public IndexableObject resolveScope(Context context, String id) throws SQLException {
        UUID uuid = UUID.fromString(id);
        IndexableObject scopeObj = new IndexableCommunity(communityService.find(context, uuid));
        if (scopeObj.getIndexedObject() == null) {
            // Not a community: fall back to resolving the uuid as a collection
            scopeObj = new IndexableCollection(collectionService.find(context, uuid));
        }
        return scopeObj;
    }
}

View File

@@ -0,0 +1,20 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
/**
 * CLI variant of the {@link MetadataExportSearch} script: the export file name
 * is taken from the {@code -n} command line option instead of a fixed default.
 */
public class MetadataExportSearchCli extends MetadataExportSearch {

    @Override
    protected String getFileNameOrExportFile() {
        String requestedFileName = commandLine.getOptionValue('n');
        return requestedFileName;
    }
}

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
/**
 * This is the CLI version of the {@link MetadataExportSearchScriptConfiguration} class that handles the
 * configuration for the {@link MetadataExportSearchCli} script.
 */
public class MetadataExportSearchCliScriptConfiguration
    extends MetadataExportSearchScriptConfiguration<MetadataExportSearchCli> {

    @Override
    public Options getOptions() {
        Options options = super.getOptions();
        // The CLI variant additionally needs an explicit output file name.
        options.addOption("n", "filename", true, "the filename to export to");
        // Return the instance we just extended. The previous code returned
        // super.getOptions() a second time, which only worked because the
        // parent happens to cache and return the same instance.
        return options;
    }
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
 * The {@link ScriptConfiguration} for the {@link MetadataExportSearch} script.
 */
public class MetadataExportSearchScriptConfiguration<T extends MetadataExportSearch> extends ScriptConfiguration<T> {

    private Class<T> dspaceRunnableClass;

    @Override
    public Class<T> getDspaceRunnableClass() {
        return dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<T> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(Context context) {
        // Anyone may run a metadata export search; no extra permission check.
        return true;
    }

    @Override
    public Options getOptions() {
        if (options == null) {
            // Build once and cache in the inherited 'options' field. A distinct
            // local name is used to avoid shadowing that field.
            Options opts = new Options();
            opts.addOption("q", "query", true,
                "The discovery search string that will be used to match records. Not URL encoded");
            opts.getOption("q").setType(String.class);
            opts.addOption("s", "scope", true,
                "UUID of a specific DSpace container (site, community or collection) to which the search has to be " +
                    "limited");
            opts.getOption("s").setType(String.class);
            opts.addOption("c", "configuration", true,
                "The name of a Discovery configuration that should be used by this search");
            opts.getOption("c").setType(String.class);
            opts.addOption("f", "filter", true,
                "Advanced search filter that has to be used to filter the result set, with syntax `<:filter-name>," +
                    "<:filter-operator>=<:filter-value>`. Not URL encoded. For example `author," +
                    "authority=5df05073-3be7-410d-8166-e254369e4166` or `title,contains=sample text`");
            opts.getOption("f").setType(String.class);
            opts.addOption("h", "help", false, "help");
            super.options = opts;
        }
        return options;
    }
}

View File

@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.dspace.content.Item;
import org.dspace.core.ReloadableEntity;
import org.dspace.eperson.EPerson;
/**
 * Entity that stores an ORCID access token related to a given eperson or a
 * given profile item.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
@Entity
@Table(name = "orcid_token")
public class OrcidToken implements ReloadableEntity<Integer> {

    // Surrogate primary key, generated from the orcid_token_id_seq sequence.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "orcid_token_id_seq")
    @SequenceGenerator(name = "orcid_token_id_seq", sequenceName = "orcid_token_id_seq", allocationSize = 1)
    private Integer id;

    // The eperson this token belongs to; lazily loaded.
    // NOTE(review): this field is 'protected' while the others are 'private' —
    // looks unintentional, but left as-is since changing visibility could
    // affect subclasses.
    @OneToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "eperson_id")
    protected EPerson ePerson;

    // The profile item this token belongs to, if any; lazily loaded.
    @OneToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "profile_item_id")
    private Item profileItem;

    // The ORCID OAuth access token value itself.
    @Column(name = "access_token")
    private String accessToken;

    @Override
    public Integer getID() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public EPerson getEPerson() {
        return ePerson;
    }

    public void setEPerson(EPerson eperson) {
        this.ePerson = eperson;
    }

    public Item getProfileItem() {
        return profileItem;
    }

    public void setProfileItem(Item profileItem) {
        this.profileItem = profileItem;
    }

    public String getAccessToken() {
        return accessToken;
    }

    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import org.dspace.app.orcid.exception.OrcidClientException;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.orcid.jaxb.model.v3.release.record.Person;
/**
 * Client interface used to contact the ORCID registry.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public interface OrcidClient {

    /**
     * Exchange the authorization code for an ORCID iD and 3-legged access token.
     * The authorization code expires upon use.
     *
     * @param  code the authorization code obtained from the ORCID authorize flow
     * @return      the ORCID token response
     * @throws OrcidClientException if some error occurs during the exchange
     */
    OrcidTokenResponseDTO getAccessToken(String code);

    /**
     * Retrieves a summary of the ORCID person related to the given orcid.
     *
     * @param  accessToken the access token used to authorize the call
     * @param  orcid       the orcid id of the record to retrieve
     * @return             the Person
     * @throws OrcidClientException if some error occurs during the search
     */
    Person getPerson(String accessToken, String orcid);
}

View File

@@ -0,0 +1,167 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import static org.apache.http.client.methods.RequestBuilder.get;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicNameValuePair;
import org.dspace.app.orcid.exception.OrcidClientException;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.util.ThrowingSupplier;
import org.orcid.jaxb.model.v3.release.record.Person;
/**
 * Implementation of {@link OrcidClient} based on Apache HttpClient.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidClientImpl implements OrcidClient {

    private final OrcidConfiguration orcidConfiguration;

    private final ObjectMapper objectMapper;

    public OrcidClientImpl(OrcidConfiguration orcidConfiguration) {
        this.orcidConfiguration = orcidConfiguration;
        this.objectMapper = new ObjectMapper();
    }

    @Override
    public OrcidTokenResponseDTO getAccessToken(String code) {
        List<NameValuePair> params = new ArrayList<NameValuePair>();
        params.add(new BasicNameValuePair("code", code));
        params.add(new BasicNameValuePair("grant_type", "authorization_code"));
        params.add(new BasicNameValuePair("client_id", orcidConfiguration.getClientId()));
        params.add(new BasicNameValuePair("client_secret", orcidConfiguration.getClientSecret()));

        // Encode the form body as UTF-8 explicitly: the previous code used
        // Charset.defaultCharset(), which depends on the JVM platform charset.
        HttpUriRequest httpUriRequest = RequestBuilder.post(orcidConfiguration.getTokenEndpointUrl())
            .addHeader("Content-Type", "application/x-www-form-urlencoded")
            .addHeader("Accept", "application/json")
            .setEntity(new UrlEncodedFormEntity(params, StandardCharsets.UTF_8))
            .build();

        return executeAndParseJson(httpUriRequest, OrcidTokenResponseDTO.class);
    }

    @Override
    public Person getPerson(String accessToken, String orcid) {
        HttpUriRequest httpUriRequest = buildGetUriRequest(accessToken, "/" + orcid + "/person");
        return executeAndUnmarshall(httpUriRequest, false, Person.class);
    }

    /** Builds an authorized GET request against the configured ORCID API URL. */
    private HttpUriRequest buildGetUriRequest(String accessToken, String relativePath) {
        return get(orcidConfiguration.getApiUrl() + relativePath.trim())
            .addHeader("Content-Type", "application/x-www-form-urlencoded")
            .addHeader("Authorization", "Bearer " + accessToken)
            .build();
    }

    /** Executes the request and maps a successful JSON response body to the given class. */
    private <T> T executeAndParseJson(HttpUriRequest httpUriRequest, Class<T> clazz) {
        HttpClient client = HttpClientBuilder.create().build();
        return executeAndReturns(() -> {
            HttpResponse response = client.execute(httpUriRequest);
            if (isNotSuccessful(response)) {
                throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response));
            }
            return objectMapper.readValue(response.getEntity().getContent(), clazz);
        });
    }

    /**
     * Executes the request and unmarshalls a successful XML response body to the
     * given class. When handleNotFoundAsNull is true, a 404 yields null instead
     * of an exception.
     */
    private <T> T executeAndUnmarshall(HttpUriRequest httpUriRequest, boolean handleNotFoundAsNull, Class<T> clazz) {
        HttpClient client = HttpClientBuilder.create().build();
        return executeAndReturns(() -> {
            HttpResponse response = client.execute(httpUriRequest);
            if (handleNotFoundAsNull && isNotFound(response)) {
                return null;
            }
            if (isNotSuccessful(response)) {
                throw new OrcidClientException(getStatusCode(response), formatErrorMessage(response));
            }
            return unmarshall(response.getEntity(), clazz);
        });
    }

    /** Wraps any non-client exception from the supplier into an OrcidClientException. */
    private <T> T executeAndReturns(ThrowingSupplier<T, Exception> supplier) {
        try {
            return supplier.get();
        } catch (OrcidClientException ex) {
            throw ex;
        } catch (Exception ex) {
            throw new OrcidClientException(ex);
        }
    }

    @SuppressWarnings("unchecked")
    private <T> T unmarshall(HttpEntity entity, Class<T> clazz) throws Exception {
        JAXBContext jaxbContext = JAXBContext.newInstance(clazz);
        XMLInputFactory xmlInputFactory = XMLInputFactory.newFactory();
        // Disable DTD support to protect against XXE on the response body.
        xmlInputFactory.setProperty(XMLInputFactory.SUPPORT_DTD, false);
        XMLStreamReader xmlStreamReader = xmlInputFactory.createXMLStreamReader(entity.getContent());
        Unmarshaller unmarshaller = jaxbContext.createUnmarshaller();
        return (T) unmarshaller.unmarshal(xmlStreamReader);
    }

    /** Reads the error response body as UTF-8, falling back to a generic message. */
    private String formatErrorMessage(HttpResponse response) {
        try {
            return IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
        } catch (UnsupportedOperationException | IOException e) {
            return "Generic error";
        }
    }

    private boolean isNotSuccessful(HttpResponse response) {
        int statusCode = getStatusCode(response);
        return statusCode < 200 || statusCode > 299;
    }

    private boolean isNotFound(HttpResponse response) {
        return getStatusCode(response) == HttpStatus.SC_NOT_FOUND;
    }

    private int getStatusCode(HttpResponse response) {
        return response.getStatusLine().getStatusCode();
    }
}

View File

@@ -0,0 +1,90 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.client;
import org.apache.commons.lang3.StringUtils;
/**
 * Holder for all the ORCID related configuration values (API endpoints, OAuth
 * client credentials and requested scopes).
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public final class OrcidConfiguration {

    private String apiUrl;

    private String redirectUrl;

    private String clientId;

    private String clientSecret;

    private String tokenEndpointUrl;

    private String authorizeEndpointUrl;

    // Comma-separated list of OAuth scopes; exposed as an array via getScopes().
    private String scopes;

    public String getApiUrl() {
        return apiUrl;
    }

    public void setApiUrl(String apiUrl) {
        this.apiUrl = apiUrl;
    }

    public String getRedirectUrl() {
        return redirectUrl;
    }

    public void setRedirectUrl(String redirectUrl) {
        this.redirectUrl = redirectUrl;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getTokenEndpointUrl() {
        return tokenEndpointUrl;
    }

    public void setTokenEndpointUrl(String tokenEndpointUrl) {
        this.tokenEndpointUrl = tokenEndpointUrl;
    }

    public String getAuthorizeEndpointUrl() {
        return authorizeEndpointUrl;
    }

    public void setAuthorizeEndpointUrl(String authorizeEndpointUrl) {
        this.authorizeEndpointUrl = authorizeEndpointUrl;
    }

    public void setScopes(String scopes) {
        this.scopes = scopes;
    }

    /**
     * Splits the configured comma-separated scope string into an array; an
     * unset or blank configuration yields an empty array.
     */
    public String[] getScopes() {
        if (StringUtils.isBlank(scopes)) {
            return new String[] {};
        }
        return StringUtils.split(scopes, ",");
    }
}

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.dao;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import org.dspace.eperson.EPerson;
/**
 * Database Access Object interface class for the OrcidToken object. The
 * implementation of this class is responsible for all database calls for the
 * OrcidToken object and is autowired by spring. This class should only be
 * accessed from a single service and should never be exposed outside of the API.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public interface OrcidTokenDAO extends GenericDAO<OrcidToken> {

    /**
     * Find an OrcidToken by ePerson.
     *
     * @param  context the DSpace context
     * @param  ePerson the ePerson to search for
     * @return         the Orcid token, or null if none exists for the ePerson
     */
    public OrcidToken findByEPerson(Context context, EPerson ePerson);

    /**
     * Find an OrcidToken by profileItem.
     *
     * @param  context     the DSpace context
     * @param  profileItem the profile item to search for
     * @return             the Orcid token, or null if none exists for the item
     */
    public OrcidToken findByProfileItem(Context context, Item profileItem);
}

View File

@@ -0,0 +1,50 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.dao.impl;
import java.sql.SQLException;
import javax.persistence.Query;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.dao.OrcidTokenDAO;
import org.dspace.content.Item;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Implementation of {@link OrcidTokenDAO}.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidTokenDAOImpl extends AbstractHibernateDAO<OrcidToken> implements OrcidTokenDAO {

    @Override
    public OrcidToken findByEPerson(Context context, EPerson ePerson) {
        return findSingleResult(context, "FROM OrcidToken WHERE ePerson = :ePerson", "ePerson", ePerson);
    }

    @Override
    public OrcidToken findByProfileItem(Context context, Item profileItem) {
        return findSingleResult(context, "FROM OrcidToken WHERE profileItem = :profileItem", "profileItem",
            profileItem);
    }

    /**
     * Runs the given single-parameter HQL query and returns its unique result,
     * or null if no row matches. SQLExceptions are rethrown unchecked, matching
     * the behavior the two finders previously duplicated.
     */
    private OrcidToken findSingleResult(Context context, String hql, String parameterName, Object parameterValue) {
        try {
            Query query = createQuery(context, hql);
            query.setParameter(parameterName, parameterValue);
            return singleResult(query);
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,48 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.exception;
/**
 * Exception thrown by {@link OrcidClient} implementations when the ORCID
 * registry answers with an error response.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidClientException extends RuntimeException {

    public static final String INVALID_GRANT_MESSAGE = "invalid_grant";

    private static final long serialVersionUID = -7618061110212398216L;

    // HTTP status of the error response; 0 when the failure had no response.
    private final int status;

    public OrcidClientException(int status, String content) {
        super(content);
        this.status = status;
    }

    public OrcidClientException(Throwable cause) {
        super(cause);
        this.status = 0;
    }

    public int getStatus() {
        return status;
    }

    /**
     * Returns true if the exception is related to an invalid grant error
     * (authentication code non valid), false otherwise.
     *
     * @return the check result
     */
    public boolean isInvalidGrantException() {
        String message = getMessage();
        if (message == null) {
            return false;
        }
        return message.contains(INVALID_GRANT_MESSAGE);
    }
}

View File

@@ -0,0 +1,28 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.model;
/**
 * The types of activities defined on ORCID that can be synchronized.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidEntityType {

    /**
     * The publication/work activity.
     */
    PUBLICATION,

    /**
     * The funding activity.
     */
    FUNDING;
}

View File

@@ -0,0 +1,135 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.StringUtils;
/**
 * DTO mapping the JSON response returned by an ORCID token endpoint.
 *
 * Response example:
 *
 * {
 *   "access_token":"f5af9f51-07e6-4332-8f1a-c0c11c1e3728",
 *   "token_type":"bearer",
 *   "refresh_token":"f725f747-3a65-49f6-a231-3e8944ce464d",
 *   "expires_in":631138518,
 *   "scope":"/read-limited",
 *   "name":"Sofia Garcia",
 *   "orcid":"0000-0001-2345-6789"
 * }
 *
 * @author Luca Giamminonni (luca.giamminonni at 4Science.it)
 *
 */
public class OrcidTokenResponseDTO {

    // The access token released by the authorization server. This is the most
    // relevant item: it allows the server to access the user resources as
    // defined in the scopes.
    @JsonProperty("access_token")
    private String accessToken;

    // The refresh token as defined in the OAuth standard.
    @JsonProperty("refresh_token")
    private String refreshToken;

    // It will be "bearer".
    @JsonProperty("token_type")
    private String tokenType;

    // The expiration timestamp in millis.
    @JsonProperty("expires_in")
    private int expiresIn;

    // Space-separated list of scopes.
    private String scope;

    // The ORCID user name.
    private String name;

    // The ORCID user id.
    private String orcid;

    public String getAccessToken() {
        return accessToken;
    }

    public void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    public String getRefreshToken() {
        return refreshToken;
    }

    public void setRefreshToken(String refreshToken) {
        this.refreshToken = refreshToken;
    }

    public String getTokenType() {
        return tokenType;
    }

    public void setTokenType(String tokenType) {
        this.tokenType = tokenType;
    }

    public int getExpiresIn() {
        return expiresIn;
    }

    public void setExpiresIn(int expiresIn) {
        this.expiresIn = expiresIn;
    }

    public String getScope() {
        return scope;
    }

    public void setScope(String scope) {
        this.scope = scope;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getOrcid() {
        return orcid;
    }

    public void setOrcid(String orcid) {
        this.orcid = orcid;
    }

    /**
     * The scope string split on single spaces; empty array when no scope is set.
     */
    @JsonIgnore
    public String[] getScopeAsArray() {
        String currentScope = getScope();
        if (StringUtils.isEmpty(currentScope)) {
            return new String[] {};
        }
        return currentScope.split(" ");
    }
}

View File

@@ -0,0 +1,147 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service;
import java.sql.SQLException;
import java.util.List;
import java.util.Optional;
import org.dspace.app.orcid.model.OrcidEntityType;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidProfileDisconnectionMode;
import org.dspace.app.profile.OrcidProfileSyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
 * Service that handles the synchronization between a DSpace profile and the
 * related ORCID profile, if any.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 */
public interface OrcidSynchronizationService {

    /**
     * Check if the given item is linked to an ORCID profile.
     *
     * @param  context the relevant DSpace Context.
     * @param  item    the item to check
     * @return         true if the given item is linked to ORCID
     */
    boolean isLinkedToOrcid(Context context, Item item);

    /**
     * Configure the given profile with the data present in the given ORCID token.
     * This action is required to synchronize profile and related entities with
     * ORCID. No security check is done, it is therefore the caller's responsibility
     * to verify for example that the current user has permission to connect the
     * profile to ORCID (if necessary).
     *
     * @param  context      the relevant DSpace Context.
     * @param  profile      the profile to configure
     * @param  token        the ORCID token
     * @throws SQLException if a SQL error occurs during the profile update
     */
    public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException;

    /**
     * Disconnect the given profile from ORCID.
     *
     * @param  context      the relevant DSpace Context.
     * @param  profile      the profile to disconnect
     * @throws SQLException if a SQL error occurs during the profile update
     */
    public void unlinkProfile(Context context, Item profile) throws SQLException;

    /**
     * Set the synchronization preference for the given profile related to the given
     * ORCID entity type.
     *
     * @param  context                  the relevant DSpace Context.
     * @param  profile                  the researcher profile to update
     * @param  entityType               the orcid entity type
     * @param  value                    the new synchronization preference value
     * @return                          true if the value has actually been updated,
     *                                  false if the value to be set is the same as
     *                                  the one already configured
     * @throws SQLException             if a SQL error occurs during the profile
     *                                  update
     * @throws IllegalArgumentException if the given researcher profile is not linked
     *                                  with an ORCID account
     */
    public boolean setEntityPreference(Context context, Item profile, OrcidEntityType entityType,
        OrcidEntitySyncPreference value) throws SQLException;

    /**
     * Update the profile's synchronization preference for the given profile.
     *
     * @param  context                  the relevant DSpace Context.
     * @param  profile                  the researcher profile to update
     * @param  values                   the new synchronization preference values
     * @return                          true if the value has actually been updated,
     *                                  false if the value to be set is the same as
     *                                  the one already configured
     * @throws SQLException             if a SQL error occurs during the profile
     *                                  update
     * @throws IllegalArgumentException if the given researcher profile is not linked
     *                                  with an ORCID account
     */
    public boolean setProfilePreference(Context context, Item profile,
        List<OrcidProfileSyncPreference> values) throws SQLException;

    /**
     * Set the ORCID synchronization mode for the given profile.
     *
     * @param  context      the relevant DSpace Context.
     * @param  profile      the researcher profile to update
     * @param  value        the new synchronization mode value
     * @return              true if the value has actually been updated, false if
     *                      the value to be set is the same as the one already
     *                      configured
     * @throws SQLException if a SQL error occurs during the profile update
     */
    public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value)
        throws SQLException;

    /**
     * Returns the ORCID synchronization mode configured for the given profile item.
     *
     * @param  profile the researcher profile item
     * @return         the synchronization mode, if configured
     */
    Optional<OrcidSynchronizationMode> getSynchronizationMode(Item profile);

    /**
     * Returns the ORCID synchronization preference related to the given entity type
     * configured for the given profile item.
     *
     * @param  profile    the researcher profile item
     * @param  entityType the orcid entity type
     * @return            the configured preference, if any
     */
    Optional<OrcidEntitySyncPreference> getEntityPreference(Item profile, OrcidEntityType entityType);

    /**
     * Returns the ORCID synchronization preferences related to the profile itself
     * configured for the given profile item.
     *
     * @param  profile the researcher profile item
     * @return         the configured profile synchronization preferences
     */
    List<OrcidProfileSyncPreference> getProfilePreferences(Item profile);

    /**
     * Returns the configured ORCID profile's disconnection mode. If that mode is
     * not configured or the configuration is wrong, the value DISABLED is returned.
     *
     * @return the disconnection mode
     */
    OrcidProfileDisconnectionMode getDisconnectionMode();
}

View File

@@ -0,0 +1,92 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Service that handles {@link OrcidToken} entities, i.e. the ORCID access
 * tokens granted to DSpace users.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public interface OrcidTokenService {

    /**
     * Creates a new OrcidToken entity for the given ePerson and accessToken,
     * without an associated profile item.
     *
     * @param  context     the DSpace context
     * @param  ePerson     the EPerson
     * @param  accessToken the access token
     * @return             the created entity instance
     */
    public OrcidToken create(Context context, EPerson ePerson, String accessToken);

    /**
     * Creates a new OrcidToken entity for the given ePerson, profile item and
     * accessToken.
     *
     * @param  context     the DSpace context
     * @param  ePerson     the EPerson
     * @param  profileItem the profile item
     * @param  accessToken the access token
     * @return             the created entity instance
     */
    public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken);

    /**
     * Find an OrcidToken by ePerson.
     *
     * @param  context the DSpace context
     * @param  ePerson the ePerson to search for
     * @return         the Orcid token, if any
     */
    public OrcidToken findByEPerson(Context context, EPerson ePerson);

    /**
     * Find an OrcidToken by profileItem.
     *
     * @param  context     the DSpace context
     * @param  profileItem the profile item to search for
     * @return             the Orcid token, if any
     */
    public OrcidToken findByProfileItem(Context context, Item profileItem);

    /**
     * Delete the given ORCID token entity.
     *
     * @param context    the DSpace context
     * @param orcidToken the ORCID token entity to delete
     */
    public void delete(Context context, OrcidToken orcidToken);

    /**
     * Delete all the ORCID token entities.
     *
     * @param context the DSpace context
     */
    public void deleteAll(Context context);

    /**
     * Deletes the ORCID token entity related to the given EPerson, if any.
     *
     * @param context the DSpace context
     * @param ePerson the ePerson for the deletion
     */
    public void deleteByEPerson(Context context, EPerson ePerson);

    /**
     * Deletes the ORCID token entity related to the given profile item, if any.
     *
     * @param context     the DSpace context
     * @param profileItem the item for the deletion
     */
    public void deleteByProfileItem(Context context, Item profileItem);
}

View File

@@ -0,0 +1,273 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service.impl;
import static java.time.LocalDateTime.now;
import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;
import static java.util.List.of;
import static java.util.Optional.ofNullable;
import static org.apache.commons.lang3.EnumUtils.isValidEnum;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.content.Item.ANY;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.codec.binary.StringUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.model.OrcidEntityType;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidProfileDisconnectionMode;
import org.dspace.app.profile.OrcidProfileSyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implementation of {@link OrcidSynchronizationService}.
 *
 * The ORCID linking state and the synchronization preferences handled here are
 * stored as metadata on the researcher profile item (fields in the
 * {@code dspace.orcid.*} and {@code person.identifier.orcid} namespaces),
 * while the ORCID access token itself is persisted via
 * {@link OrcidTokenService}.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidSynchronizationServiceImpl implements OrcidSynchronizationService {

    @Autowired
    private ItemService itemService;

    @Autowired
    private ConfigurationService configurationService;

    @Autowired
    private EPersonService ePersonService;

    @Autowired
    private OrcidTokenService orcidTokenService;

    @Override
    public void linkProfile(Context context, Item profile, OrcidTokenResponseDTO token) throws SQLException {

        EPerson ePerson = ePersonService.findByProfileItem(context, profile);
        if (ePerson == null) {
            throw new IllegalArgumentException(
                "The given profile item is not related to any eperson. Item id: " + profile.getID());
        }

        String orcid = token.getOrcid();
        String accessToken = token.getAccessToken();
        String[] scopes = token.getScopeAsArray();

        // Store the ORCID iD and replace any previously granted scopes
        itemService.setMetadataSingleValue(context, profile, "person", "identifier", "orcid", null, orcid);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
        for (String scope : scopes) {
            itemService.addMetadata(context, profile, "dspace", "orcid", "scope", null, scope);
        }

        // Record the first-authentication timestamp only once
        if (isBlank(itemService.getMetadataFirstValue(profile, "dspace", "orcid", "authenticated", Item.ANY))) {
            String currentDate = ISO_DATE_TIME.format(now());
            itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "authenticated", null, currentDate);
        }

        setAccessToken(context, profile, ePerson, accessToken);

        // Use the ORCID iD as netid only if it is not already taken and the
        // eperson has no netid yet
        EPerson ePersonByOrcid = ePersonService.findByNetid(context, orcid);
        if (ePersonByOrcid == null && isBlank(ePerson.getNetid())) {
            ePerson.setNetid(orcid);
            updateEPerson(context, ePerson);
        }

        updateItem(context, profile);
    }

    @Override
    public void unlinkProfile(Context context, Item profile) throws SQLException {
        // Remove all ORCID-related metadata and the persisted access token
        itemService.clearMetadata(context, profile, "person", "identifier", "orcid", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "scope", Item.ANY);
        itemService.clearMetadata(context, profile, "dspace", "orcid", "authenticated", Item.ANY);
        orcidTokenService.deleteByProfileItem(context, profile);
        updateItem(context, profile);
    }

    @Override
    public boolean setEntityPreference(Context context, Item profile, OrcidEntityType type,
        OrcidEntitySyncPreference value) throws SQLException {
        // Preferences are stored per entity type, e.g. dspace.orcid.sync-publications
        String metadataQualifier = "sync-" + type.name().toLowerCase() + "s";
        return updatePreferenceForSynchronizingWithOrcid(context, profile, metadataQualifier, of(value.name()));
    }

    @Override
    public boolean setProfilePreference(Context context, Item profile, List<OrcidProfileSyncPreference> values)
        throws SQLException {
        List<String> valuesAsString = values.stream()
            .map(OrcidProfileSyncPreference::name)
            .collect(Collectors.toList());
        return updatePreferenceForSynchronizingWithOrcid(context, profile, "sync-profile", valuesAsString);
    }

    @Override
    public boolean setSynchronizationMode(Context context, Item profile, OrcidSynchronizationMode value)
        throws SQLException {

        if (!isLinkedToOrcid(context, profile)) {
            throw new IllegalArgumentException("The given profile cannot be configured for the ORCID "
                + "synchronization because it is not linked to any ORCID account: "
                + profile.getID());
        }

        String newValue = value.name();
        String oldValue = itemService.getMetadataFirstValue(profile, "dspace", "orcid", "sync-mode", Item.ANY);

        // NOTE(review): this is commons-codec StringUtils (see imports), not
        // commons-lang — presumably null-safe equals; confirm
        if (StringUtils.equals(oldValue, newValue)) {
            return false;
        } else {
            itemService.setMetadataSingleValue(context, profile, "dspace", "orcid", "sync-mode", null, value.name());
            return true;
        }
    }

    @Override
    public Optional<OrcidSynchronizationMode> getSynchronizationMode(Item item) {
        // Invalid or missing values are treated as "not configured"
        return getMetadataValue(item, "dspace.orcid.sync-mode")
            .map(metadataValue -> metadataValue.getValue())
            .filter(value -> isValidEnum(OrcidSynchronizationMode.class, value))
            .map(value -> OrcidSynchronizationMode.valueOf(value));
    }

    @Override
    public Optional<OrcidEntitySyncPreference> getEntityPreference(Item item, OrcidEntityType entityType) {
        return getMetadataValue(item, "dspace.orcid.sync-" + entityType.name().toLowerCase() + "s")
            .map(metadataValue -> metadataValue.getValue())
            .filter(value -> isValidEnum(OrcidEntitySyncPreference.class, value))
            .map(value -> OrcidEntitySyncPreference.valueOf(value));
    }

    @Override
    public List<OrcidProfileSyncPreference> getProfilePreferences(Item item) {
        return getMetadataValues(item, "dspace.orcid.sync-profile")
            .map(MetadataValue::getValue)
            .filter(value -> isValidEnum(OrcidProfileSyncPreference.class, value))
            .map(value -> OrcidProfileSyncPreference.valueOf(value))
            .collect(Collectors.toList());
    }

    @Override
    public boolean isLinkedToOrcid(Context context, Item item) {
        // "Linked" requires both a persisted access token and an ORCID iD on the item
        return getOrcidAccessToken(context, item).isPresent() && getOrcid(item).isPresent();
    }

    @Override
    public OrcidProfileDisconnectionMode getDisconnectionMode() {
        String value = configurationService.getProperty("orcid.disconnection.allowed-users");
        if (!OrcidProfileDisconnectionMode.isValid(value)) {
            // Missing or misconfigured property: disconnection is disabled
            return OrcidProfileDisconnectionMode.DISABLED;
        }
        return OrcidProfileDisconnectionMode.fromString(value);
    }

    /**
     * Persist the given access token: create a new {@link OrcidToken} entity if
     * the eperson has none yet, otherwise update the existing one.
     */
    private void setAccessToken(Context context, Item profile, EPerson ePerson, String accessToken) {
        OrcidToken orcidToken = orcidTokenService.findByEPerson(context, ePerson);
        if (orcidToken == null) {
            orcidTokenService.create(context, ePerson, profile, accessToken);
        } else {
            // NOTE(review): the modified entity is not explicitly saved here —
            // presumably the ORM session flushes the change; confirm
            orcidToken.setProfileItem(profile);
            orcidToken.setAccessToken(accessToken);
        }
    }

    /**
     * Replace the values of the dspace.orcid.&lt;metadataQualifier&gt; field
     * with the given values, if they actually differ from the stored ones.
     *
     * @return true if the stored values changed, false if the new values are
     *         the same set as the old ones
     * @throws IllegalArgumentException if the profile is not linked to ORCID
     */
    private boolean updatePreferenceForSynchronizingWithOrcid(Context context, Item profile,
        String metadataQualifier,
        List<String> values) throws SQLException {

        if (!isLinkedToOrcid(context, profile)) {
            throw new IllegalArgumentException("The given profile cannot be configured for the ORCID "
                + "synchronization because it is not linked to any ORCID account: "
                + profile.getID());
        }

        List<String> oldValues = itemService.getMetadata(profile, "dspace", "orcid", metadataQualifier, ANY).stream()
            .map(metadataValue -> metadataValue.getValue())
            .collect(Collectors.toList());

        // Order-insensitive comparison: no update when the sets are equal
        if (containsSameValues(oldValues, values)) {
            return false;
        }

        itemService.clearMetadata(context, profile, "dspace", "orcid", metadataQualifier, ANY);
        for (String value : values) {
            itemService.addMetadata(context, profile, "dspace", "orcid", metadataQualifier, null, value);
        }

        return true;
    }

    // True if the two lists contain the same values, ignoring order and duplicates
    private boolean containsSameValues(List<String> firstList, List<String> secondList) {
        return new HashSet<>(firstList).equals(new HashSet<>(secondList));
    }

    // The persisted access token, if any, for the given profile item
    private Optional<String> getOrcidAccessToken(Context context, Item item) {
        return ofNullable(orcidTokenService.findByProfileItem(context, item))
            .map(orcidToken -> orcidToken.getAccessToken());
    }

    // The ORCID iD, if any, stored on the given item
    public Optional<String> getOrcid(Item item) {
        return getMetadataValue(item, "person.identifier.orcid")
            .map(metadataValue -> metadataValue.getValue());
    }

    private Optional<MetadataValue> getMetadataValue(Item item, String metadataField) {
        return getMetadataValues(item, metadataField).findFirst();
    }

    private Stream<MetadataValue> getMetadataValues(Item item, String metadataField) {
        return item.getMetadata().stream()
            .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.')));
    }

    // Update the item with the authorization system temporarily disabled
    private void updateItem(Context context, Item item) throws SQLException {
        try {
            context.turnOffAuthorisationSystem();
            itemService.update(context, item);
        } catch (AuthorizeException e) {
            throw new RuntimeException(e);
        } finally {
            context.restoreAuthSystemState();
        }
    }

    private void updateEPerson(Context context, EPerson ePerson) throws SQLException {
        try {
            ePersonService.update(context, ePerson);
        } catch (AuthorizeException e) {
            throw new RuntimeException(e);
        }
    }
}

View File

@@ -0,0 +1,99 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.orcid.service.impl;
import java.sql.SQLException;
import java.util.List;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.dao.OrcidTokenDAO;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Default {@link OrcidTokenService} implementation, backed by
 * {@link OrcidTokenDAO}. Checked {@link SQLException}s raised by the DAO layer
 * are rethrown as unchecked {@link RuntimeException}s.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidTokenServiceImpl implements OrcidTokenService {

    @Autowired
    private OrcidTokenDAO orcidTokenDAO;

    @Override
    public OrcidToken create(Context context, EPerson ePerson, String accessToken) {
        // No profile item known yet: delegate with a null profile item
        return create(context, ePerson, null, accessToken);
    }

    @Override
    public OrcidToken create(Context context, EPerson ePerson, Item profileItem, String accessToken) {
        OrcidToken entity = new OrcidToken();
        entity.setEPerson(ePerson);
        entity.setProfileItem(profileItem);
        entity.setAccessToken(accessToken);
        try {
            return orcidTokenDAO.create(context, entity);
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }

    @Override
    public OrcidToken findByEPerson(Context context, EPerson ePerson) {
        return orcidTokenDAO.findByEPerson(context, ePerson);
    }

    @Override
    public OrcidToken findByProfileItem(Context context, Item profileItem) {
        return orcidTokenDAO.findByProfileItem(context, profileItem);
    }

    @Override
    public void delete(Context context, OrcidToken orcidToken) {
        try {
            orcidTokenDAO.delete(context, orcidToken);
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }

    @Override
    public void deleteAll(Context context) {
        try {
            // Delete one by one so each removal goes through delete(Context, OrcidToken)
            for (OrcidToken entity : orcidTokenDAO.findAll(context, OrcidToken.class)) {
                delete(context, entity);
            }
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }

    @Override
    public void deleteByEPerson(Context context, EPerson ePerson) {
        // No-op when the eperson has no token
        OrcidToken entity = findByEPerson(context, ePerson);
        if (entity == null) {
            return;
        }
        delete(context, entity);
    }

    @Override
    public void deleteByProfileItem(Context context, Item profileItem) {
        // No-op when the profile item has no token
        OrcidToken entity = findByProfileItem(context, profileItem);
        if (entity == null) {
            return;
        }
        delete(context, entity);
    }
}

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
 * Enum that models the allowed values to configure the ORCID synchronization
 * preferences for a specific entity type.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidEntitySyncPreference {

    /**
     * Preference to be set to disable the synchronization with ORCID of the
     * specific entity.
     */
    DISABLED,

    /**
     * Preference to be set to enable the synchronization with ORCID of all items
     * relating to the specific entity.
     */
    ALL
}

View File

@@ -0,0 +1,97 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static java.time.LocalDateTime.now;
import static java.time.format.DateTimeFormatter.ISO_DATE_TIME;
import static org.apache.commons.collections.CollectionUtils.isNotEmpty;
import static org.dspace.content.Item.ANY;
import java.sql.SQLException;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.service.AfterResearcherProfileCreationAction;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
/**
 * Implementation of {@link AfterResearcherProfileCreationAction} that copies
 * the ORCID metadata, if any, from the owner eperson onto the newly created
 * researcher profile item, and re-targets an existing ORCID token to that item.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
@Order(Ordered.HIGHEST_PRECEDENCE)
public class OrcidMetadataCopyingAction implements AfterResearcherProfileCreationAction {

    @Autowired
    private ItemService itemService;

    @Autowired
    private EPersonService ePersonService;

    @Autowired
    private OrcidTokenService orcidTokenService;

    @Override
    public void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException {

        Item profileItem = researcherProfile.getItem();

        // Mirror the eperson's ORCID iD and granted scopes onto the profile item
        copyMetadataValues(context, owner, "eperson.orcid", profileItem, "person.identifier.orcid");
        copyMetadataValues(context, owner, "eperson.orcid.scope", profileItem, "dspace.orcid.scope");

        OrcidToken token = orcidTokenService.findByEPerson(context, owner);
        if (token != null) {
            token.setProfileItem(profileItem);
        }

        if (isLinkedToOrcid(owner, token)) {
            // Record when the link was established
            itemService.setMetadataSingleValue(context, profileItem, "dspace", "orcid", "authenticated", null,
                ISO_DATE_TIME.format(now()));
        }
    }

    /**
     * Copy the values of the given eperson metadata field into the given item
     * metadata field, replacing any existing values. Does nothing when the
     * eperson field is empty.
     */
    private void copyMetadataValues(Context context, EPerson ePerson, String ePersonMetadataField, Item item,
        String itemMetadataField) throws SQLException {

        List<String> sourceValues = getMetadataValues(ePerson, ePersonMetadataField);
        if (CollectionUtils.isEmpty(sourceValues)) {
            return;
        }

        MetadataFieldName target = new MetadataFieldName(itemMetadataField);
        itemService.clearMetadata(context, item, target.schema, target.element, target.qualifier, ANY);
        itemService.addMetadata(context, item, target.schema, target.element, target.qualifier, null, sourceValues);
    }

    // Linked means the eperson has both an ORCID iD and a persisted token
    private boolean isLinkedToOrcid(EPerson ePerson, OrcidToken orcidToken) {
        return orcidToken != null && isNotEmpty(getMetadataValues(ePerson, "eperson.orcid"));
    }

    private List<String> getMetadataValues(EPerson ePerson, String metadataField) {
        return ePersonService.getMetadataByMetadataString(ePerson, metadataField)
            .stream()
            .map(MetadataValue::getValue)
            .collect(Collectors.toList());
    }
}

View File

@@ -0,0 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static org.apache.commons.lang3.EnumUtils.isValidEnum;
/**
 * Enum that models all the available values of the property which determines
 * which users can disconnect a profile from an ORCID account.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidProfileDisconnectionMode {

    /**
     * The disconnection is disabled.
     */
    DISABLED,

    /**
     * Only the profile's owner can disconnect that profile from ORCID.
     */
    ONLY_OWNER,

    /**
     * Only the admins can disconnect profiles from ORCID.
     */
    ONLY_ADMIN,

    /**
     * Only the admin or the profile's owner can disconnect that profile from ORCID.
     */
    ADMIN_AND_OWNER;

    /**
     * Checks whether the given string identifies one of the modes, ignoring
     * case. The previous EnumUtils/toUpperCase() implementation used the
     * default locale's case mapping, which broke the match for names
     * containing 'i' (e.g. "only_admin") under locales such as Turkish.
     *
     * @param  mode the candidate mode name (may be null)
     * @return      true if the name matches one of the enum constants
     */
    public static boolean isValid(String mode) {
        return fromString(mode) != null;
    }

    /**
     * Parses the given string into a mode, ignoring case.
     *
     * @param  mode the candidate mode name (may be null)
     * @return      the matching mode, or null if the name is null or invalid
     */
    public static OrcidProfileDisconnectionMode fromString(String mode) {
        if (mode == null) {
            return null;
        }
        for (OrcidProfileDisconnectionMode candidate : values()) {
            // equalsIgnoreCase is locale-independent, unlike toUpperCase()
            if (candidate.name().equalsIgnoreCase(mode)) {
                return candidate;
            }
        }
        return null;
    }
}

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
 * Enum that models the allowed values to configure the ORCID synchronization
 * preferences for the user's profile.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidProfileSyncPreference {

    /**
     * Data relating to the name, country and keywords of the ORCID profile.
     */
    BIOGRAPHICAL,

    /**
     * Data relating to external identifiers and researcher urls of the ORCID
     * profile.
     */
    IDENTIFIERS;
}

View File

@@ -0,0 +1,29 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
/**
 * Enum that models the allowed values to configure the ORCID synchronization
 * mode.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public enum OrcidSynchronizationMode {

    /**
     * Mode in which the user can manually decide when to synchronize data with
     * ORCID.
     */
    MANUAL,

    /**
     * Mode in which synchronizations with ORCID occur through an automatic process.
     */
    BATCH;
}

View File

@@ -63,6 +63,11 @@ public class ResearcherProfile {
return item; return item;
} }
public Optional<String> getOrcid() {
return getMetadataValue(item, "person.identifier.orcid")
.map(metadataValue -> metadataValue.getValue());
}
private MetadataValue getDspaceObjectOwnerMetadata(Item item) { private MetadataValue getDspaceObjectOwnerMetadata(Item item) {
return getMetadataValue(item, "dspace.object.owner") return getMetadataValue(item, "dspace.object.owner")
.filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null) .filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null)

View File

@@ -18,14 +18,18 @@ import static org.dspace.eperson.Group.ANONYMOUS;
import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import javax.annotation.PostConstruct;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.dspace.app.exception.ResourceAlreadyExistsException; import org.dspace.app.exception.ResourceAlreadyExistsException;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.profile.service.AfterResearcherProfileCreationAction;
import org.dspace.app.profile.service.ResearcherProfileService; import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
@@ -88,6 +92,21 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
@Autowired @Autowired
private AuthorizeService authorizeService; private AuthorizeService authorizeService;
@Autowired
private OrcidSynchronizationService orcidSynchronizationService;
@Autowired(required = false)
private List<AfterResearcherProfileCreationAction> afterCreationActions;
@PostConstruct
public void postConstruct() {
if (afterCreationActions == null) {
afterCreationActions = Collections.emptyList();
}
}
@Override @Override
public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException { public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException {
Assert.notNull(id, "An id must be provided to find a researcher profile"); Assert.notNull(id, "An id must be provided to find a researcher profile");
@@ -113,15 +132,16 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
.orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles")); .orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles"));
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
try { Item item = createProfileItem(context, ePerson, collection);
context.restoreAuthSystemState();
Item item = createProfileItem(context, ePerson, collection); ResearcherProfile researcherProfile = new ResearcherProfile(item);
return new ResearcherProfile(item);
} finally { for (AfterResearcherProfileCreationAction afterCreationAction : afterCreationActions) {
context.restoreAuthSystemState(); afterCreationAction.perform(context, researcherProfile, ePerson);
} }
return researcherProfile;
} }
@Override @Override
@@ -137,6 +157,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
deleteItem(context, profileItem); deleteItem(context, profileItem);
} else { } else {
removeOwnerMetadata(context, profileItem); removeOwnerMetadata(context, profileItem);
orcidSynchronizationService.unlinkProfile(context, profileItem);
} }
} }

View File

@@ -0,0 +1,35 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile.service;
import java.sql.SQLException;
import org.dspace.app.profile.ResearcherProfile;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Interface to mark classes that perform additional logic on a newly created
 * researcher profile.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public interface AfterResearcherProfileCreationAction {

    /**
     * Perform some action on the given researcher profile. Implementations may
     * modify the profile in place; nothing is returned.
     *
     * @param  context           the DSpace context
     * @param  researcherProfile the created researcher profile
     * @param  owner             the EPerson that is owner of the given profile
     * @throws SQLException      if a SQL error occurs
     */
    void perform(Context context, ResearcherProfile researcherProfile, EPerson owner) throws SQLException;
}
}

View File

@@ -0,0 +1,175 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.cli.ParseException;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.core.Context;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.IndexingService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory;
import org.dspace.scripts.DSpaceRunnable;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.util.SolrUtils;
import org.dspace.utils.DSpace;
/**
* {@link DSpaceRunnable} implementation to update solr items with "predb" status to either:
* - Delete them from solr if they're not present in the database
* - Remove their status if they're present in the database
*/
public class SolrDatabaseResyncCli extends DSpaceRunnable<SolrDatabaseResyncCliScriptConfiguration> {
/* Log4j logger */
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(SolrDatabaseResyncCli.class);
public static final String TIME_UNTIL_REINDEX_PROPERTY = "solr-database-resync.time-until-reindex";
private IndexingService indexingService;
private SolrSearchCore solrSearchCore;
private IndexObjectFactoryFactory indexObjectServiceFactory;
private ConfigurationService configurationService;
private int timeUntilReindex = 0;
private String maxTime;
/**
 * Returns the script configuration registered under the
 * {@code "solr-database-resync"} service name.
 */
@Override
public SolrDatabaseResyncCliScriptConfiguration getScriptConfiguration() {
    return new DSpace().getServiceManager()
                       .getServiceByName("solr-database-resync", SolrDatabaseResyncCliScriptConfiguration.class);
}
/**
 * Entry point for scheduled execution: builds a fresh instance, runs
 * {@link #setup()} and then the resync itself.
 *
 * @throws Exception if the setup or the resync fails
 */
public static void runScheduled() throws Exception {
    SolrDatabaseResyncCli script = new SolrDatabaseResyncCli();
    script.setup();
    script.internalRun();
}
/**
 * Looks up the services required by the script from the DSpace service
 * manager and the configuration service.
 */
@Override
public void setup() throws ParseException {
    indexingService = DSpaceServicesFactory.getInstance().getServiceManager()
                                           .getServiceByName(IndexingService.class.getName(), IndexingService.class);
    // First registered SolrSearchCore service is used
    solrSearchCore = DSpaceServicesFactory.getInstance().getServiceManager()
                                          .getServicesByType(SolrSearchCore.class).get(0);
    indexObjectServiceFactory = IndexObjectFactoryFactory.getInstance();
    configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
}
/**
 * Runs the resync: resolves the configured cutoff values, then processes all
 * flagged solr documents within a dedicated context with the authorization
 * system switched off.
 */
@Override
public void internalRun() throws Exception {
    logInfoAndOut("Starting Item resync of Solr and Database...");

    timeUntilReindex = getTimeUntilReindex();
    maxTime = getMaxTime();

    Context context = new Context();
    try {
        context.turnOffAuthorisationSystem();
        performStatusUpdate(context);
    } finally {
        // Always restore the authorization system and complete the context
        context.restoreAuthSystemState();
        context.complete();
    }
}
private void performStatusUpdate(Context context) throws SearchServiceException, SolrServerException, IOException {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery(STATUS_FIELD + ":" + STATUS_FIELD_PREDB);
solrQuery.addFilterQuery(SearchUtils.RESOURCE_TYPE_FIELD + ":" + IndexableItem.TYPE);
String dateRangeFilter = SearchUtils.LAST_INDEXED_FIELD + ":[* TO " + maxTime + "]";
logDebugAndOut("Date range filter used; " + dateRangeFilter);
solrQuery.addFilterQuery(dateRangeFilter);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
if (response != null) {
logInfoAndOut(response.getResults().size() + " items found to process");
for (SolrDocument doc : response.getResults()) {
String uuid = (String) doc.getFirstValue(SearchUtils.RESOURCE_ID_FIELD);
String uniqueId = (String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID);
logDebugAndOut("Processing item with UUID: " + uuid);
Optional<IndexableObject> indexableObject = Optional.empty();
try {
indexableObject = indexObjectServiceFactory
.getIndexableObjectFactory(uniqueId).findIndexableObject(context, uuid);
} catch (SQLException e) {
log.warn("An exception occurred when attempting to retrieve item with UUID \"" + uuid +
"\" from the database, removing related solr document", e);
}
try {
if (indexableObject.isPresent()) {
logDebugAndOut("Item exists in DB, updating solr document");
updateItem(context, indexableObject.get());
} else {
logDebugAndOut("Item doesn't exist in DB, removing solr document");
removeItem(context, uniqueId);
}
} catch (SQLException | IOException e) {
log.error(e.getMessage(), e);
}
}
}
indexingService.commit();
}
private void updateItem(Context context, IndexableObject indexableObject) throws SolrServerException, IOException {
Map<String,Object> fieldModifier = new HashMap<>(1);
fieldModifier.put("remove", STATUS_FIELD_PREDB);
indexingService.atomicUpdate(context, indexableObject.getUniqueIndexID(), STATUS_FIELD, fieldModifier);
}
private void removeItem(Context context, String uniqueId) throws IOException, SQLException {
indexingService.unIndexContent(context, uniqueId);
}
private String getMaxTime() {
Calendar cal = Calendar.getInstance();
if (timeUntilReindex > 0) {
cal.add(Calendar.MILLISECOND, -timeUntilReindex);
}
return SolrUtils.getDateFormatter().format(cal.getTime());
}
private int getTimeUntilReindex() {
return configurationService.getIntProperty(TIME_UNTIL_REINDEX_PROPERTY, 0);
}
private void logInfoAndOut(String message) {
log.info(message);
System.out.println(message);
}
private void logDebugAndOut(String message) {
log.debug(message);
System.out.println(message);
}
}

View File

@@ -0,0 +1,42 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import org.apache.commons.cli.Options;
import org.dspace.core.Context;
import org.dspace.scripts.configuration.ScriptConfiguration;
/**
* The {@link ScriptConfiguration} for the {@link SolrDatabaseResyncCli} script.
*/
public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguration<SolrDatabaseResyncCli> {

    /** The runnable class handled by this configuration (set via the setter below). */
    private Class<SolrDatabaseResyncCli> dspaceRunnableClass;

    @Override
    public Class<SolrDatabaseResyncCli> getDspaceRunnableClass() {
        return this.dspaceRunnableClass;
    }

    @Override
    public void setDspaceRunnableClass(Class<SolrDatabaseResyncCli> dspaceRunnableClass) {
        this.dspaceRunnableClass = dspaceRunnableClass;
    }

    @Override
    public boolean isAllowedToExecute(Context context) {
        // Any user may run the resync script.
        return true;
    }

    @Override
    public Options getOptions() {
        // Lazily initialize an empty option set: this script takes no command line arguments.
        Options currentOptions = options;
        if (currentOptions == null) {
            currentOptions = new Options();
            options = currentOptions;
        }
        return currentOptions;
    }
}

View File

@@ -0,0 +1,104 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.authenticate.factory.AuthenticateServiceFactory;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.kernel.ServiceManager;
import org.dspace.utils.DSpace;
/**
* Implementation of {@link AuthenticationMethod} that delegate all the method
* invocations to the bean of class {@link OrcidAuthenticationBean}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidAuthentication implements AuthenticationMethod {

    private final ServiceManager serviceManager = new DSpace().getServiceManager();

    /**
     * Check if the OrcidAuthentication plugin is enabled, i.e. whether a method
     * with the same plugin name is registered with the authentication service.
     *
     * @return true if enabled, false otherwise
     */
    public static boolean isEnabled() {
        String pluginName = new OrcidAuthentication().getName();
        Iterator<AuthenticationMethod> methods = AuthenticateServiceFactory.getInstance()
            .getAuthenticationService().authenticationMethodIterator();
        while (methods.hasNext()) {
            AuthenticationMethod method = methods.next();
            if (pluginName.equals(method.getName())) {
                return true;
            }
        }
        return false;
    }

    /** Look up the Spring-managed bean that implements the actual ORCID logic. */
    private OrcidAuthenticationBean delegate() {
        return serviceManager.getServiceByName("orcidAuthentication", OrcidAuthenticationBean.class);
    }

    @Override
    public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
        throws SQLException {
        return delegate().authenticate(context, username, password, realm, request);
    }

    @Override
    public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {
        return delegate().loginPageURL(context, request, response);
    }

    @Override
    public boolean isUsed(Context context, HttpServletRequest request) {
        return delegate().isUsed(context, request);
    }

    @Override
    public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
        return delegate().canSelfRegister(context, request, username);
    }

    @Override
    public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
        delegate().initEPerson(context, request, eperson);
    }

    @Override
    public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
        return delegate().allowSetPassword(context, request, username);
    }

    @Override
    public boolean isImplicit() {
        return delegate().isImplicit();
    }

    @Override
    public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
        return delegate().getSpecialGroups(context, request);
    }

    @Override
    public String getName() {
        return delegate().getName();
    }
}

View File

@@ -0,0 +1,330 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.authenticate;
import static java.lang.String.format;
import static java.net.URLEncoder.encode;
import static org.apache.commons.lang.BooleanUtils.toBoolean;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.dspace.content.Item.ANY;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.client.OrcidClient;
import org.dspace.app.orcid.client.OrcidConfiguration;
import org.dspace.app.orcid.model.OrcidTokenResponseDTO;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.profile.ResearcherProfile;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.ConfigurationService;
import org.orcid.jaxb.model.v3.release.record.Email;
import org.orcid.jaxb.model.v3.release.record.Person;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* ORCID authentication for DSpace.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class OrcidAuthenticationBean implements AuthenticationMethod {

    /** Request attribute set while an ORCID login attempt is in progress (checked by {@link #isUsed}). */
    public static final String ORCID_AUTH_ATTRIBUTE = "orcid-authentication";

    private final static Logger LOGGER = LoggerFactory.getLogger(OrcidAuthenticationBean.class);

    // Template for the ORCID authorize URL: endpoint, client id, scopes, url-encoded redirect URI.
    private final static String LOGIN_PAGE_URL_FORMAT = "%s?client_id=%s&response_type=code&scope=%s&redirect_uri=%s";

    @Autowired
    private OrcidClient orcidClient;

    @Autowired
    private OrcidConfiguration orcidConfiguration;

    @Autowired
    private ConfigurationService configurationService;

    @Autowired
    private EPersonService ePersonService;

    @Autowired
    private ResearcherProfileService researcherProfileService;

    @Autowired
    private OrcidSynchronizationService orcidSynchronizationService;

    @Autowired
    private OrcidTokenService orcidTokenService;

    /**
     * Authenticate using the OAuth authorization code supplied by ORCID in the
     * "code" request parameter. Username/password/realm are unused for this method.
     *
     * @return one of the {@link AuthenticationMethod} result codes: SUCCESS,
     *         NO_SUCH_USER (no code, token exchange failed, or registration refused)
     *         or BAD_ARGS (null request or the matched EPerson may not log in)
     */
    @Override
    public int authenticate(Context context, String username, String password, String realm, HttpServletRequest request)
        throws SQLException {

        if (request == null) {
            LOGGER.warn("Unable to authenticate using ORCID because the request object is null.");
            return BAD_ARGS;
        }

        String code = (String) request.getParameter("code");
        if (StringUtils.isEmpty(code)) {
            LOGGER.warn("The incoming request has not code parameter");
            return NO_SUCH_USER;
        }

        // Flag the request so isUsed() can report that ORCID handled this login attempt.
        request.setAttribute(ORCID_AUTH_ATTRIBUTE, true);

        return authenticateWithOrcid(context, code, request);
    }

    /**
     * Build the ORCID authorize URL users are redirected to in order to start the
     * OAuth flow. Returns an empty string if mandatory configuration is missing or
     * the redirect URI cannot be encoded.
     */
    @Override
    public String loginPageURL(Context context, HttpServletRequest request, HttpServletResponse response) {

        String authorizeUrl = orcidConfiguration.getAuthorizeEndpointUrl();
        String clientId = orcidConfiguration.getClientId();
        String redirectUri = orcidConfiguration.getRedirectUrl();
        String scopes = String.join("+", orcidConfiguration.getScopes());

        if (StringUtils.isAnyBlank(authorizeUrl, clientId, redirectUri, scopes)) {
            LOGGER.error("Missing mandatory configuration properties for OrcidAuthentication");
            return "";
        }

        try {
            return format(LOGIN_PAGE_URL_FORMAT, authorizeUrl, clientId, scopes, encode(redirectUri, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            LOGGER.error(e.getMessage(), e);
            return "";
        }
    }

    @Override
    public boolean isUsed(Context context, HttpServletRequest request) {
        // True only when authenticate() flagged this request as an ORCID attempt.
        return request.getAttribute(ORCID_AUTH_ATTRIBUTE) != null;
    }

    @Override
    public boolean canSelfRegister(Context context, HttpServletRequest request, String username) throws SQLException {
        return canSelfRegister();
    }

    @Override
    public void initEPerson(Context context, HttpServletRequest request, EPerson eperson) throws SQLException {
        // Nothing to initialize: all EPerson setup happens in registerNewEPerson().
    }

    @Override
    public boolean allowSetPassword(Context context, HttpServletRequest request, String username) throws SQLException {
        // ORCID accounts authenticate via OAuth only; no local password is kept.
        return false;
    }

    @Override
    public boolean isImplicit() {
        return false;
    }

    @Override
    public List<Group> getSpecialGroups(Context context, HttpServletRequest request) throws SQLException {
        return Collections.emptyList();
    }

    @Override
    public String getName() {
        return "orcid";
    }

    /**
     * Core of the login flow: exchange the code for a token, then match an EPerson
     * by netid (the ORCID iD), else by the public email from the ORCID record,
     * else self-register a new EPerson if that is enabled.
     */
    private int authenticateWithOrcid(Context context, String code, HttpServletRequest request) throws SQLException {
        OrcidTokenResponseDTO token = getOrcidAccessToken(code);
        if (token == null) {
            return NO_SUCH_USER;
        }

        String orcid = token.getOrcid();

        EPerson ePerson = ePersonService.findByNetid(context, orcid);
        if (ePerson != null) {
            return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
        }

        Person person = getPersonFromOrcid(token);
        if (person == null) {
            return NO_SUCH_USER;
        }

        String email = getEmail(person).orElse(null);

        ePerson = ePersonService.findByEmail(context, email);
        if (ePerson != null) {
            return ePerson.canLogIn() ? logInEPerson(context, token, ePerson) : BAD_ARGS;
        }

        return canSelfRegister() ? registerNewEPerson(context, person, token) : NO_SUCH_USER;
    }

    /**
     * Log in an existing EPerson: set it on the context, refresh its ORCID
     * metadata/token, and link its researcher profile (if any) to the ORCID record.
     */
    private int logInEPerson(Context context, OrcidTokenResponseDTO token, EPerson ePerson)
        throws SQLException {

        context.setCurrentUser(ePerson);

        setOrcidMetadataOnEPerson(context, ePerson, token);

        ResearcherProfile profile = findProfile(context, ePerson);
        if (profile != null) {
            orcidSynchronizationService.linkProfile(context, profile.getItem(), token);
        }

        return SUCCESS;
    }

    /** Look up the researcher profile of the given EPerson; may return null if none exists. */
    private ResearcherProfile findProfile(Context context, EPerson ePerson) throws SQLException {
        try {
            return researcherProfileService.findById(context, ePerson.getID());
        } catch (AuthorizeException e) {
            // Not expected during login; surface as an unchecked error.
            throw new RuntimeException(e);
        }
    }

    /**
     * Self-register a new EPerson from the ORCID record (requires a public email).
     * Runs with authorization disabled; rolls the transaction back on any failure.
     */
    private int registerNewEPerson(Context context, Person person, OrcidTokenResponseDTO token) throws SQLException {

        try {
            context.turnOffAuthorisationSystem();

            String email = getEmail(person)
                .orElseThrow(() -> new IllegalStateException("The email is configured private on orcid"));

            String orcid = token.getOrcid();

            EPerson eperson = ePersonService.create(context);

            // The ORCID iD doubles as the netid, so future logins match by netid directly.
            eperson.setNetid(orcid);

            eperson.setEmail(email);

            Optional<String> firstName = getFirstName(person);
            if (firstName.isPresent()) {
                eperson.setFirstName(context, firstName.get());
            }

            Optional<String> lastName = getLastName(person);
            if (lastName.isPresent()) {
                eperson.setLastName(context, lastName.get());
            }
            eperson.setCanLogIn(true);
            eperson.setSelfRegistered(true);

            setOrcidMetadataOnEPerson(context, eperson, token);

            ePersonService.update(context, eperson);
            context.setCurrentUser(eperson);
            context.dispatchEvents();

            return SUCCESS;

        } catch (Exception ex) {
            LOGGER.error("An error occurs registering a new EPerson from ORCID", ex);
            // Discard the partially created EPerson; the connection stays usable.
            context.rollback();
            return NO_SUCH_USER;
        } finally {
            context.restoreAuthSystemState();
        }
    }

    /**
     * Store the ORCID iD and granted scopes as EPerson metadata and persist the
     * access token (creating or updating the stored OrcidToken).
     */
    private void setOrcidMetadataOnEPerson(Context context, EPerson person, OrcidTokenResponseDTO token)
        throws SQLException {

        String orcid = token.getOrcid();
        String accessToken = token.getAccessToken();
        String[] scopes = token.getScopeAsArray();

        ePersonService.setMetadataSingleValue(context, person, "eperson", "orcid", null, null, orcid);
        // Replace any previously stored scopes with the ones just granted.
        ePersonService.clearMetadata(context, person, "eperson", "orcid", "scope", ANY);
        for (String scope : scopes) {
            ePersonService.addMetadata(context, person, "eperson", "orcid", "scope", null, scope);
        }

        OrcidToken orcidToken = orcidTokenService.findByEPerson(context, person);
        if (orcidToken == null) {
            orcidTokenService.create(context, person, accessToken);
        } else {
            orcidToken.setAccessToken(accessToken);
        }
    }

    /** Fetch the person record from ORCID; returns null (logging the error) on any failure. */
    private Person getPersonFromOrcid(OrcidTokenResponseDTO token) {
        try {
            return orcidClient.getPerson(token.getAccessToken(), token.getOrcid());
        } catch (Exception ex) {
            LOGGER.error("An error occurs retriving the ORCID record with id " + token.getOrcid(), ex);
            return null;
        }
    }

    /** @return the first public email of the ORCID record, if any */
    private Optional<String> getEmail(Person person) {
        List<Email> emails = person.getEmails() != null ? person.getEmails().getEmails() : Collections.emptyList();
        if (CollectionUtils.isEmpty(emails)) {
            return Optional.empty();
        }
        return Optional.ofNullable(emails.get(0).getEmail());
    }

    /** @return the given names from the ORCID record, if public */
    private Optional<String> getFirstName(Person person) {
        return Optional.ofNullable(person.getName())
            .map(name -> name.getGivenNames())
            .map(givenNames -> givenNames.getContent());
    }

    /** @return the family name from the ORCID record, if public */
    private Optional<String> getLastName(Person person) {
        return Optional.ofNullable(person.getName())
            .map(name -> name.getFamilyName())
            .map(givenNames -> givenNames.getContent());
    }

    /** @return whether self-registration via ORCID is enabled (defaults to true) */
    private boolean canSelfRegister() {
        String canSelfRegister = configurationService.getProperty("authentication-orcid.can-self-register", "true");
        if (isBlank(canSelfRegister)) {
            return true;
        }
        return toBoolean(canSelfRegister);
    }

    /** Exchange the OAuth code for an access token; returns null (logging the error) on failure. */
    private OrcidTokenResponseDTO getOrcidAccessToken(String code) {
        try {
            return orcidClient.getAccessToken(code);
        } catch (Exception ex) {
            LOGGER.error("An error occurs retriving the ORCID access_token", ex);
            return null;
        }
    }

    public OrcidClient getOrcidClient() {
        return orcidClient;
    }

    public void setOrcidClient(OrcidClient orcidClient) {
        this.orcidClient = orcidClient;
    }
}

View File

@@ -158,6 +158,11 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
} }
bundle.addBitstream(bitstream); bundle.addBitstream(bitstream);
// If a bitstream is moved from one bundle to another it may be temporarily flagged as deleted
// (when removed from the original bundle)
if (bitstream.isDeleted()) {
bitstream.setDeleted(false);
}
bitstream.getBundles().add(bundle); bitstream.getBundles().add(bundle);

View File

@@ -26,6 +26,8 @@ import java.util.stream.Stream;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.util.AuthorizeUtil; import org.dspace.app.util.AuthorizeUtil;
import org.dspace.authorize.AuthorizeConfiguration; import org.dspace.authorize.AuthorizeConfiguration;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -124,6 +126,9 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
@Autowired(required = true) @Autowired(required = true)
private EntityTypeService entityTypeService; private EntityTypeService entityTypeService;
@Autowired
private OrcidTokenService orcidTokenService;
protected ItemServiceImpl() { protected ItemServiceImpl() {
super(); super();
} }
@@ -752,6 +757,11 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
harvestedItemService.delete(context, hi); harvestedItemService.delete(context, hi);
} }
OrcidToken orcidToken = orcidTokenService.findByProfileItem(context, item);
if (orcidToken != null) {
orcidToken.setProfileItem(null);
}
//Only clear collections after we have removed everything else from the item //Only clear collections after we have removed everything else from the item
item.clearCollections(); item.clearCollections();
item.setOwningCollection(null); item.setOwningCollection(null);

View File

@@ -538,6 +538,36 @@ public class Context implements AutoCloseable {
} }
} }
/**
* Rollback the current transaction with the database, without persisting any
* pending changes. The database connection is not closed and can be reused
* afterwards.
*
* <b>WARNING: After calling this method all previously fetched entities are
* "detached" (pending changes are not tracked anymore). You have to reload all
* entities you still want to work with manually after this method call (see
* {@link Context#reloadEntity(ReloadableEntity)}).</b>
*
* @throws SQLException When rolling back the transaction in the database fails.
*/
public void rollback() throws SQLException {
// If Context is no longer open/valid, just note that it has already been closed
if (!isValid()) {
log.info("rollback() was called on a closed Context object. No changes to abort.");
return;
}
try {
// Rollback ONLY if we have a database transaction, and it is NOT Read Only
if (!isReadOnly() && isTransactionAlive()) {
dbConnection.rollback();
reloadContextBoundEntities();
}
} finally {
events = null;
}
}
/** /**
* Close the context, without committing any of the changes performed using * Close the context, without committing any of the changes performed using
* this context. The database connection is freed. No exception is thrown if * this context. The database connection is freed. No exception is thrown if

View File

@@ -259,12 +259,19 @@ public class Curation extends DSpaceRunnable<CurationScriptConfiguration> {
super.handler.logError("EPerson not found: " + currentUserUuid); super.handler.logError("EPerson not found: " + currentUserUuid);
throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid); throw new IllegalArgumentException("Unable to find a user with uuid: " + currentUserUuid);
} }
assignSpecialGroupsInContext();
this.context.setCurrentUser(eperson); this.context.setCurrentUser(eperson);
} catch (SQLException e) { } catch (SQLException e) {
handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e); handler.handleException("Something went wrong trying to fetch eperson for uuid: " + currentUserUuid, e);
} }
} }
protected void assignSpecialGroupsInContext() throws SQLException {
for (UUID uuid : handler.getSpecialGroups()) {
context.setSpecialGroup(uuid);
}
}
/** /**
* Fills in some optional command line options. * Fills in some optional command line options.
* Checks if there are missing required options or invalid values for options. * Checks if there are missing required options or invalid values for options.

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.discovery; package org.dspace.discovery;
import java.sql.SQLException;
import java.util.HashSet; import java.util.HashSet;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@@ -37,6 +38,8 @@ public class IndexEventConsumer implements Consumer {
// collect Items, Collections, Communities that need indexing // collect Items, Collections, Communities that need indexing
private Set<IndexableObject> objectsToUpdate = new HashSet<>(); private Set<IndexableObject> objectsToUpdate = new HashSet<>();
// collect freshly created Items that need indexing (requires pre-db status)
private Set<IndexableObject> createdItemsToUpdate = new HashSet<>();
// unique search IDs to delete // unique search IDs to delete
private Set<String> uniqueIdsToDelete = new HashSet<>(); private Set<String> uniqueIdsToDelete = new HashSet<>();
@@ -65,6 +68,7 @@ public class IndexEventConsumer implements Consumer {
if (objectsToUpdate == null) { if (objectsToUpdate == null) {
objectsToUpdate = new HashSet<>(); objectsToUpdate = new HashSet<>();
uniqueIdsToDelete = new HashSet<>(); uniqueIdsToDelete = new HashSet<>();
createdItemsToUpdate = new HashSet<>();
} }
int st = event.getSubjectType(); int st = event.getSubjectType();
@@ -143,6 +147,7 @@ public class IndexEventConsumer implements Consumer {
String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString(); String detail = indexableObjectService.getType() + "-" + event.getSubjectID().toString();
uniqueIdsToDelete.add(detail); uniqueIdsToDelete.add(detail);
} }
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject)); objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, subject));
} }
break; break;
@@ -162,7 +167,7 @@ public class IndexEventConsumer implements Consumer {
// also update the object in order to index mapped/unmapped Items // also update the object in order to index mapped/unmapped Items
if (subject != null && if (subject != null &&
subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) { subject.getType() == Constants.COLLECTION && object.getType() == Constants.ITEM) {
objectsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object)); createdItemsToUpdate.addAll(indexObjectServiceFactory.getIndexableObjects(ctx, object));
} }
} }
break; break;
@@ -209,23 +214,11 @@ public class IndexEventConsumer implements Consumer {
} }
// update the changed Items not deleted because they were on create list // update the changed Items not deleted because they were on create list
for (IndexableObject iu : objectsToUpdate) { for (IndexableObject iu : objectsToUpdate) {
/* we let all types through here and indexObject(ctx, iu, false);
* allow the search indexer to make }
* decisions on indexing and/or removal // update the created Items with a pre-db status
*/ for (IndexableObject iu : createdItemsToUpdate) {
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject())); indexObject(ctx, iu, true);
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
}
}
} }
} finally { } finally {
if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) { if (!objectsToUpdate.isEmpty() || !uniqueIdsToDelete.isEmpty()) {
@@ -235,6 +228,27 @@ public class IndexEventConsumer implements Consumer {
// "free" the resources // "free" the resources
objectsToUpdate.clear(); objectsToUpdate.clear();
uniqueIdsToDelete.clear(); uniqueIdsToDelete.clear();
createdItemsToUpdate.clear();
}
}
}
private void indexObject(Context ctx, IndexableObject iu, boolean preDb) throws SQLException {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
iu.setIndexedObject(ctx.reloadEntity(iu.getIndexedObject()));
String uniqueIndexID = iu.getUniqueIndexID();
if (uniqueIndexID != null) {
try {
indexer.indexContent(ctx, iu, true, false, preDb);
log.debug("Indexed "
+ iu.getTypeText()
+ ", id=" + iu.getID()
+ ", unique_id=" + uniqueIndexID);
} catch (Exception e) {
log.error("Failed while indexing object: ", e);
} }
} }
} }

View File

@@ -9,7 +9,9 @@ package org.dspace.discovery;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Map;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.core.Context; import org.dspace.core.Context;
/** /**
@@ -30,6 +32,17 @@ public interface IndexingService {
void indexContent(Context context, IndexableObject dso, void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit) throws SQLException, SearchServiceException; boolean force, boolean commit) throws SQLException, SearchServiceException;
/**
* Index a given DSO
* @param context The DSpace Context
* @param dso The DSpace Object to index
* @param force Force update even if not stale
* @param commit Commit the changes
* @param preDb Add a "preDB" status to the index (only applicable to Items)
*/
void indexContent(Context context, IndexableObject dso,
boolean force, boolean commit, boolean preDb) throws SQLException, SearchServiceException;
void unIndexContent(Context context, IndexableObject dso) void unIndexContent(Context context, IndexableObject dso)
throws SQLException, IOException; throws SQLException, IOException;
@@ -62,4 +75,15 @@ public interface IndexingService {
void optimize() throws SearchServiceException; void optimize() throws SearchServiceException;
void buildSpellCheck() throws SearchServiceException, IOException; void buildSpellCheck() throws SearchServiceException, IOException;
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
* @param uniqueIndexId The unique index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/
*/
void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String,Object> fieldModifier)
throws SolrServerException, IOException;
} }

View File

@@ -8,6 +8,7 @@
package org.dspace.discovery; package org.dspace.discovery;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Iterator;
import java.util.List; import java.util.List;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -38,6 +39,7 @@ public interface SearchService {
DiscoverResult search(Context context, DiscoverQuery query) DiscoverResult search(Context context, DiscoverQuery query)
throws SearchServiceException; throws SearchServiceException;
/** /**
* Convenient method to call @see #search(Context, DSpaceObject, * Convenient method to call @see #search(Context, DSpaceObject,
* DiscoverQuery, boolean) with includeWithdrawn=false * DiscoverQuery, boolean) with includeWithdrawn=false
@@ -52,9 +54,22 @@ public interface SearchService {
DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query) DiscoverResult search(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException; throws SearchServiceException;
/**
* Convenience method to call @see #search(Context, DSpaceObject, DiscoverQuery) and getting an iterator for the
* results
*
* @param context DSpace context object
* @param dso a DSpace object to use as a scope of the search
* @param query the discovery query object
* @return an iterator iterating over all results from the search
* @throws SearchServiceException if a search error occurs
*/
Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException;
List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset, List<IndexableObject> search(Context context, String query, String orderfield, boolean ascending, int offset,
int max, String... filterquery); int max, String... filterquery);
/** /**
* Transforms the given string field and value into a filter query * Transforms the given string field and value into a filter query

View File

@@ -20,6 +20,7 @@ import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.utils.DiscoverQueryBuilder;
import org.dspace.kernel.ServiceManager; import org.dspace.kernel.ServiceManager;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItem;
@@ -170,4 +171,10 @@ public class SearchUtils {
DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName); DiscoveryConfiguration configurationExtra = getDiscoveryConfigurationByName(confName);
result.add(configurationExtra); result.add(configurationExtra);
} }
public static DiscoverQueryBuilder getQueryBuilder() {
ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager();
return manager
.getServiceByName(DiscoverQueryBuilder.class.getName(), DiscoverQueryBuilder.class);
}
} }

View File

@@ -8,6 +8,8 @@
package org.dspace.discovery; package org.dspace.discovery;
import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.joining;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import java.io.IOException; import java.io.IOException;
import java.io.PrintWriter; import java.io.PrintWriter;
@@ -118,8 +120,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
/** /**
* If the handle for the "dso" already exists in the index, and the "dso" * If the handle for the "dso" already exists in the index, and the "dso"
* has a lastModified timestamp that is newer than the document in the index * has a lastModified timestamp that is newer than the document in the index
@@ -166,6 +166,24 @@ public class SolrServiceImpl implements SearchService, IndexingService {
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument); indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
} }
/**
* Update the given indexable object using a given service
* @param context The DSpace Context
* @param indexableObjectService The service to index the object with
* @param indexableObject The object to index
* @param preDB Add a "preDB" status to the document
*/
protected void update(Context context, IndexFactory indexableObjectService, IndexableObject indexableObject,
boolean preDB) throws IOException, SQLException, SolrServerException {
if (preDB) {
final SolrInputDocument solrInputDocument =
indexableObjectService.buildNewDocument(context, indexableObject);
indexableObjectService.writeDocument(context, indexableObject, solrInputDocument);
} else {
update(context, indexableObjectService, indexableObject);
}
}
/** /**
* unIndex removes an Item, Collection, or Community * unIndex removes an Item, Collection, or Community
* *
@@ -454,6 +472,16 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
@Override
public void atomicUpdate(Context context, String uniqueIndexId, String field, Map<String, Object> fieldModifier)
throws SolrServerException, IOException {
SolrInputDocument solrInputDocument = new SolrInputDocument();
solrInputDocument.addField(SearchUtils.RESOURCE_UNIQUE_ID, uniqueIndexId);
solrInputDocument.addField(field, fieldModifier);
solrSearchCore.getSolr().add(solrInputDocument);
}
// ////////////////////////////////// // //////////////////////////////////
// Private // Private
// ////////////////////////////////// // //////////////////////////////////
@@ -710,16 +738,21 @@ public class SolrServiceImpl implements SearchService, IndexingService {
discoveryQuery.addFilterQueries("location:l" + dso.getID()); discoveryQuery.addFilterQueries("location:l" + dso.getID());
} else if (dso instanceof IndexableItem) { } else if (dso instanceof IndexableItem) {
discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso. discoveryQuery.addFilterQueries(SearchUtils.RESOURCE_UNIQUE_ID + ":" + dso.
getUniqueIndexID()); getUniqueIndexID());
} }
} }
return search(context, discoveryQuery); return search(context, discoveryQuery);
} }
@Override
public Iterator<Item> iteratorSearch(Context context, IndexableObject dso, DiscoverQuery query)
throws SearchServiceException {
return new SearchIterator(context, dso, query);
}
@Override @Override
public DiscoverResult search(Context context, DiscoverQuery discoveryQuery ) public DiscoverResult search(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException { throws SearchServiceException {
try { try {
if (solrSearchCore.getSolr() == null) { if (solrSearchCore.getSolr() == null) {
@@ -733,6 +766,72 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
/**
* This class implements an iterator over items that is specifically used to iterate over search results
*/
private class SearchIterator implements Iterator<Item> {
private Context context;
private DiscoverQuery discoverQuery;
private DiscoverResult discoverResult;
private IndexableObject dso;
private int absoluteCursor;
private int relativeCursor;
private int pagesize;
SearchIterator(Context context, DiscoverQuery discoverQuery) throws SearchServiceException {
this.context = context;
this.discoverQuery = discoverQuery;
this.absoluteCursor = discoverQuery.getStart();
initialise();
}
SearchIterator(Context context, IndexableObject dso, DiscoverQuery discoverQuery)
throws SearchServiceException {
this.context = context;
this.dso = dso;
this.discoverQuery = discoverQuery;
initialise();
}
private void initialise() throws SearchServiceException {
this.relativeCursor = 0;
if (discoverQuery.getMaxResults() != -1) {
pagesize = discoverQuery.getMaxResults();
} else {
pagesize = 10;
}
discoverQuery.setMaxResults(pagesize);
this.discoverResult = search(context, dso, discoverQuery);
}
@Override
public boolean hasNext() {
return absoluteCursor < discoverResult.getTotalSearchResults();
}
@Override
public Item next() {
//paginate getting results from the discoverquery.
if (relativeCursor == pagesize) {
// get a new page of results when the last element of the previous page has been read
int offset = absoluteCursor;
// reset the position counter for getting element relativecursor on a page
relativeCursor = 0;
discoverQuery.setStart(offset);
try {
discoverResult = search(context, dso, discoverQuery);
} catch (SearchServiceException e) {
log.error("error while getting search results", e);
}
}
// get the element at position relativecursor on a page
IndexableObject res = discoverResult.getIndexableObjects().get(relativeCursor);
relativeCursor++;
absoluteCursor++;
return (Item) res.getIndexedObject();
}
}
protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery) protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery)
throws SearchServiceException { throws SearchServiceException {
SolrQuery solrQuery = new SolrQuery(); SolrQuery solrQuery = new SolrQuery();
@@ -753,6 +852,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD); solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD); solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID); solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
solrQuery.addField(STATUS_FIELD);
if (discoveryQuery.isSpellCheck()) { if (discoveryQuery.isSpellCheck()) {
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query); solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -903,11 +1003,14 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Enables solr to remove documents related to items not on database anymore (Stale) // Enables solr to remove documents related to items not on database anymore (Stale)
// if maxAttemps is greater than 0 cleanup the index on each step // if maxAttemps is greater than 0 cleanup the index on each step
if (maxAttempts >= 0) { if (maxAttempts >= 0) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)); Object statusObj = doc.getFirstValue(STATUS_FIELD);
// avoid to process the response except if we are in the last allowed execution. if (!(statusObj instanceof String && statusObj.equals(STATUS_FIELD_PREDB))) {
// When maxAttempts is 0 this will be just the first and last run as the zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// executionCount is increased at the start of the loop it will be equals to 1 // avoid to process the response except if we are in the last allowed execution.
skipLoadingResponse = maxAttempts + 1 != executionCount; // When maxAttempts is 0 this will be just the first and last run as the
// executionCount is increased at the start of the loop it will be equals to 1
skipLoadingResponse = maxAttempts + 1 != executionCount;
}
} }
continue; continue;
} }
@@ -1389,6 +1492,28 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
} }
@Override
public void indexContent(Context context, IndexableObject indexableObject, boolean force,
boolean commit, boolean preDb) throws SearchServiceException, SQLException {
if (preDb) {
try {
final IndexFactory indexableObjectFactory = indexObjectServiceFactory.
getIndexableObjectFactory(indexableObject);
if (force || requiresIndexing(indexableObject.getUniqueIndexID(), indexableObject.getLastModified())) {
update(context, indexableObjectFactory, indexableObject, true);
log.info(LogHelper.getHeader(context, "indexed_object", indexableObject.getUniqueIndexID()));
}
} catch (IOException | SQLException | SolrServerException | SearchServiceException e) {
log.error(e.getMessage(), e);
}
} else {
indexContent(context, indexableObject, force);
}
if (commit) {
commit();
}
}
@Override @Override
public void commit() throws SearchServiceException { public void commit() throws SearchServiceException {
try { try {

View File

@@ -70,10 +70,20 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
return doc; return doc;
} }
@Override
public SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException {
return buildDocument(context, indexableObject);
}
@Override @Override
public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument) public void writeDocument(Context context, T indexableObject, SolrInputDocument solrInputDocument)
throws SQLException, IOException, SolrServerException { throws SQLException, IOException, SolrServerException {
writeDocument(solrInputDocument, null); try {
writeDocument(solrInputDocument, null);
} catch (Exception e) {
log.error("Error occurred while writing SOLR document for {} object {}",
indexableObject.getType(), indexableObject.getID(), e);
}
} }
/** /**

View File

@@ -79,6 +79,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemIndexFactoryImpl.class);
public static final String VARIANTS_STORE_SEPARATOR = "###"; public static final String VARIANTS_STORE_SEPARATOR = "###";
public static final String STORE_SEPARATOR = "\n|||\n"; public static final String STORE_SEPARATOR = "\n|||\n";
public static final String STATUS_FIELD = "database_status";
public static final String STATUS_FIELD_PREDB = "predb";
@Autowired @Autowired
@@ -210,6 +212,14 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl<Indexable
return item.equals(latestVersion.getItem()); return item.equals(latestVersion.getItem());
} }
@Override
public SolrInputDocument buildNewDocument(Context context, IndexableItem indexableItem)
throws SQLException, IOException {
SolrInputDocument doc = buildDocument(context, indexableItem);
doc.addField(STATUS_FIELD, STATUS_FIELD_PREDB);
return doc;
}
@Override @Override
public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item, public void addDiscoveryFields(SolrInputDocument doc, Context context, Item item,
List<DiscoveryConfiguration> discoveryConfigurations) List<DiscoveryConfiguration> discoveryConfigurations)

View File

@@ -46,6 +46,14 @@ public interface IndexFactory<T extends IndexableObject, S> {
*/ */
SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException; SolrInputDocument buildDocument(Context context, T indexableObject) throws SQLException, IOException;
/**
* Create solr document with all the shared fields initialized.
* Can contain special fields required for "new" documents vs regular buildDocument
* @param indexableObject the indexableObject that we want to index
* @return initialized solr document
*/
SolrInputDocument buildNewDocument(Context context, T indexableObject) throws SQLException, IOException;
/** /**
* Write the provided document to the solr core * Write the provided document to the solr core
* @param context DSpace context object * @param context DSpace context object

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.utils; package org.dspace.discovery.utils;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList; import static java.util.Collections.singletonList;
@@ -19,10 +19,6 @@ import java.util.Objects;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.converter.query.SearchQueryConverter;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.InvalidSearchRequestException;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFacetField;
@@ -32,6 +28,7 @@ import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration; import org.dspace.discovery.configuration.DiscoveryHitHighlightFieldConfiguration;
@@ -40,17 +37,11 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet;
import org.dspace.discovery.configuration.DiscoverySortConfiguration; import org.dspace.discovery.configuration.DiscoverySortConfiguration;
import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;
/**
* This class builds the queries for the /search and /facet endpoints.
*/
@Component
public class DiscoverQueryBuilder implements InitializingBean { public class DiscoverQueryBuilder implements InitializingBean {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DiscoverQueryBuilder.class);
@@ -74,51 +65,60 @@ public class DiscoverQueryBuilder implements InitializingBean {
/** /**
* Build a discovery query * Build a discovery query
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page) String dsoType, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection) throws SearchServiceException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, page); return buildQuery(context, scope, discoveryConfiguration, query, searchFilters, dsoTypes, pageSize, offset,
sortProperty, sortDirection);
} }
/** /**
* Build a discovery query * Build a discovery query
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param offset the offset for this discovery query
* @param sortProperty the sort property for this discovery query
* @param sortDirection the sort direction for this discovery query
*/ */
public DiscoverQuery buildQuery(Context context, IndexableObject scope, public DiscoverQuery buildQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String query, List<SearchFilter> searchFilters, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page) List<String> dsoTypes, Integer pageSize, Long offset, String sortProperty,
throws DSpaceBadRequestException { String sortDirection)
throws IllegalArgumentException, SearchServiceException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFaceting(context, scope, queryArgs, discoveryConfiguration); addFaceting(context, scope, queryArgs, discoveryConfiguration);
//Configure pagination and sorting //Configure pagination and sorting
configurePagination(page, queryArgs); configurePagination(pageSize, offset, queryArgs);
configureSorting(page, queryArgs, discoveryConfiguration.getSearchSortConfiguration()); configureSorting(sortProperty, sortDirection, queryArgs, discoveryConfiguration.getSearchSortConfiguration());
addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs); addDiscoveryHitHighlightFields(discoveryConfiguration, queryArgs);
return queryArgs; return queryArgs;
@@ -128,11 +128,11 @@ public class DiscoverQueryBuilder implements InitializingBean {
DiscoverQuery queryArgs) { DiscoverQuery queryArgs) {
if (discoveryConfiguration.getHitHighlightingConfiguration() != null) { if (discoveryConfiguration.getHitHighlightingConfiguration() != null) {
List<DiscoveryHitHighlightFieldConfiguration> metadataFields = discoveryConfiguration List<DiscoveryHitHighlightFieldConfiguration> metadataFields = discoveryConfiguration
.getHitHighlightingConfiguration().getMetadataFields(); .getHitHighlightingConfiguration().getMetadataFields();
for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) { for (DiscoveryHitHighlightFieldConfiguration fieldConfiguration : metadataFields) {
queryArgs.addHitHighlightingField( queryArgs.addHitHighlightingField(
new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(), new DiscoverHitHighlightingField(fieldConfiguration.getField(), fieldConfiguration.getMaxSize(),
fieldConfiguration.getSnippets())); fieldConfiguration.getSnippets()));
} }
} }
} }
@@ -140,92 +140,97 @@ public class DiscoverQueryBuilder implements InitializingBean {
/** /**
* Create a discovery facet query. * Create a discovery facet query.
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix. * @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoType only include search results with this type * @param dsoType only include search results with this type
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param facetName the facet field * @param offset the offset for this discovery query
* @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
String dsoType, Pageable page, String facetName) String dsoType, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList(); List<String> dsoTypes = dsoType != null ? singletonList(dsoType) : emptyList();
return buildFacetQuery( return buildFacetQuery(
context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); context, scope, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, pageSize, offset,
facetName);
} }
/** /**
* Create a discovery facet query. * Create a discovery facet query.
* *
* @param context the DSpace context * @param context the DSpace context
* @param scope the scope for this discovery query * @param scope the scope for this discovery query
* @param discoveryConfiguration the discovery configuration for this discovery query * @param discoveryConfiguration the discovery configuration for this discovery query
* @param prefix limit the facets results to those starting with the given prefix. * @param prefix limit the facets results to those starting with the given prefix.
* @param query the query string for this discovery query * @param query the query string for this discovery query
* @param searchFilters the search filters for this discovery query * @param searchFilters the search filters for this discovery query
* @param dsoTypes only include search results with one of these types * @param dsoTypes only include search results with one of these types
* @param page the pageable for this discovery query * @param pageSize the page size for this discovery query
* @param facetName the facet field * @param offset the offset for this discovery query
* @param facetName the facet field
*/ */
public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope, public DiscoverQuery buildFacetQuery(Context context, IndexableObject scope,
DiscoveryConfiguration discoveryConfiguration, DiscoveryConfiguration discoveryConfiguration,
String prefix, String query, List<SearchFilter> searchFilters, String prefix, String query, List<QueryBuilderSearchFilter> searchFilters,
List<String> dsoTypes, Pageable page, String facetName) List<String> dsoTypes, Integer pageSize, Long offset, String facetName)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters, DiscoverQuery queryArgs = buildCommonDiscoverQuery(context, discoveryConfiguration, query, searchFilters,
dsoTypes); dsoTypes);
//When all search criteria are set, configure facet results //When all search criteria are set, configure facet results
addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, page); addFacetingForFacets(context, scope, prefix, queryArgs, discoveryConfiguration, facetName, pageSize);
//We don' want any search results, we only want facet values //We don' want any search results, we only want facet values
queryArgs.setMaxResults(0); queryArgs.setMaxResults(0);
//Configure pagination //Configure pagination
configurePaginationForFacets(page, queryArgs); configurePaginationForFacets(offset, queryArgs);
return queryArgs; return queryArgs;
} }
private void configurePaginationForFacets(Pageable page, DiscoverQuery queryArgs) { private void configurePaginationForFacets(Long offset, DiscoverQuery queryArgs) {
if (page != null) { if (offset != null) {
queryArgs.setFacetOffset(Math.toIntExact(page.getOffset())); queryArgs.setFacetOffset(Math.toIntExact(offset));
} }
} }
private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix, private DiscoverQuery addFacetingForFacets(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration, String facetName, Pageable page) DiscoverQuery queryArgs, DiscoveryConfiguration discoveryConfiguration,
throws DSpaceBadRequestException { String facetName, Integer pageSize)
throws IllegalArgumentException {
DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName); DiscoverySearchFilterFacet facet = discoveryConfiguration.getSidebarFacet(facetName);
if (facet != null) { if (facet != null) {
queryArgs.setFacetMinCount(1); queryArgs.setFacetMinCount(1);
int pageSize = Math.min(pageSizeLimit, page.getPageSize());
pageSize = pageSize != null ? Math.min(pageSizeLimit, pageSize) : pageSizeLimit;
fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize); fillFacetIntoQueryArgs(context, scope, prefix, queryArgs, facet, pageSize);
} else { } else {
throw new DSpaceBadRequestException(facetName + " is not a valid search facet"); throw new IllegalArgumentException(facetName + " is not a valid search facet");
} }
return queryArgs; return queryArgs;
} }
private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix, private void fillFacetIntoQueryArgs(Context context, IndexableObject scope, String prefix,
DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) { DiscoverQuery queryArgs, DiscoverySearchFilterFacet facet, final int pageSize) {
if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) { if (facet.getType().equals(DiscoveryConfigurationParameters.TYPE_DATE)) {
try { try {
FacetYearRange facetYearRange = FacetYearRange facetYearRange =
searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs); searchService.getFacetYearRange(context, scope, facet, queryArgs.getFilterQueries(), queryArgs);
queryArgs.addYearRangeFacet(facet, facetYearRange); queryArgs.addYearRangeFacet(facet, facetYearRange);
@@ -241,18 +246,18 @@ public class DiscoverQueryBuilder implements InitializingBean {
int facetLimit = pageSize + 1; int facetLimit = pageSize + 1;
//This should take care of the sorting for us //This should take care of the sorting for us
queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit, queryArgs.addFacetField(new DiscoverFacetField(facet.getIndexFieldName(), facet.getType(), facetLimit,
facet.getSortOrderSidebar(), StringUtils.trimToNull(prefix))); facet.getSortOrderSidebar(),
StringUtils.trimToNull(prefix)));
} }
} }
private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration, private DiscoverQuery buildCommonDiscoverQuery(Context context, DiscoveryConfiguration discoveryConfiguration,
String query, String query,
List<SearchFilter> searchFilters, List<String> dsoTypes) List<QueryBuilderSearchFilter> searchFilters, List<String> dsoTypes)
throws DSpaceBadRequestException { throws IllegalArgumentException {
DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration); DiscoverQuery queryArgs = buildBaseQueryForConfiguration(discoveryConfiguration);
//Add search filters queryArgs.addFilterQueries(convertFiltersToString(context, discoveryConfiguration, searchFilters));
queryArgs.addFilterQueries(convertFilters(context, discoveryConfiguration, searchFilters));
//Set search query //Set search query
if (StringUtils.isNotBlank(query)) { if (StringUtils.isNotBlank(query)) {
@@ -274,30 +279,17 @@ public class DiscoverQueryBuilder implements InitializingBean {
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray( .toArray(
new String[discoveryConfiguration.getDefaultFilterQueries() new String[discoveryConfiguration
.size()])); .getDefaultFilterQueries()
.size()]));
return queryArgs; return queryArgs;
} }
private void configureSorting(Pageable page, DiscoverQuery queryArgs, private void configureSorting(String sortProperty, String sortDirection, DiscoverQuery queryArgs,
DiscoverySortConfiguration searchSortConfiguration) throws DSpaceBadRequestException { DiscoverySortConfiguration searchSortConfiguration)
String sortBy = null; throws IllegalArgumentException, SearchServiceException {
String sortOrder = null; String sortBy = sortProperty;
String sortOrder = sortDirection;
//Read the Pageable object if there is one
if (page != null) {
Sort sort = page.getSort();
if (sort != null && sort.iterator().hasNext()) {
Sort.Order order = sort.iterator().next();
sortBy = order.getProperty();
sortOrder = order.getDirection().name();
}
}
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new InvalidSearchRequestException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Load defaults if we did not receive values //Load defaults if we did not receive values
if (sortBy == null) { if (sortBy == null) {
@@ -307,24 +299,30 @@ public class DiscoverQueryBuilder implements InitializingBean {
sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder); sortOrder = getDefaultSortDirection(searchSortConfiguration, sortOrder);
} }
if (StringUtils.isNotBlank(sortBy) && !isConfigured(sortBy, searchSortConfiguration)) {
throw new SearchServiceException(
"The field: " + sortBy + "is not configured for the configuration!");
}
//Update Discovery query //Update Discovery query
DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration DiscoverySortFieldConfiguration sortFieldConfiguration = searchSortConfiguration
.getSortFieldConfiguration(sortBy); .getSortFieldConfiguration(sortBy);
if (sortFieldConfiguration != null) { if (sortFieldConfiguration != null) {
String sortField = searchService String sortField = searchService
.toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType()); .toSortFieldIndex(sortFieldConfiguration.getMetadataField(), sortFieldConfiguration.getType());
if ("asc".equalsIgnoreCase(sortOrder)) { if ("asc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc); queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.asc);
} else if ("desc".equalsIgnoreCase(sortOrder)) { } else if ("desc".equalsIgnoreCase(sortOrder)) {
queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc); queryArgs.setSortField(sortField, DiscoverQuery.SORT_ORDER.desc);
} else { } else {
throw new DSpaceBadRequestException(sortOrder + " is not a valid sort order"); throw new IllegalArgumentException(sortOrder + " is not a valid sort order");
} }
} else { } else {
throw new DSpaceBadRequestException(sortBy + " is not a valid sort field"); throw new IllegalArgumentException(sortBy + " is not a valid sort field");
} }
} }
@@ -334,7 +332,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) {
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name();
} }
return sortOrder; return sortOrder;
@@ -344,7 +342,7 @@ public class DiscoverQueryBuilder implements InitializingBean {
String sortBy;// Attempt to find the default one, if none found we use SCORE String sortBy;// Attempt to find the default one, if none found we use SCORE
sortBy = "score"; sortBy = "score";
if (Objects.nonNull(searchSortConfiguration.getSortFields()) && if (Objects.nonNull(searchSortConfiguration.getSortFields()) &&
!searchSortConfiguration.getSortFields().isEmpty()) { !searchSortConfiguration.getSortFields().isEmpty()) {
DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0);
if (StringUtils.isBlank(defaultSort.getMetadataField())) { if (StringUtils.isBlank(defaultSort.getMetadataField())) {
return sortBy; return sortBy;
@@ -354,66 +352,31 @@ public class DiscoverQueryBuilder implements InitializingBean {
return sortBy; return sortBy;
} }
private void configurePagination(Pageable page, DiscoverQuery queryArgs) { private void configurePagination(Integer size, Long offset, DiscoverQuery queryArgs) {
if (page != null) { queryArgs.setMaxResults(size != null ? Math.min(pageSizeLimit, size) : pageSizeLimit);
queryArgs.setMaxResults(Math.min(pageSizeLimit, page.getPageSize())); queryArgs.setStart(offset != null ? Math.toIntExact(offset) : 0);
queryArgs.setStart(Math.toIntExact(page.getOffset()));
} else {
queryArgs.setMaxResults(pageSizeLimit);
queryArgs.setStart(0);
}
} }
private String getDsoType(String dsoType) throws DSpaceBadRequestException { private String getDsoType(String dsoType) throws IllegalArgumentException {
for (IndexFactory indexFactory : indexableFactories) { for (IndexFactory indexFactory : indexableFactories) {
if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) { if (StringUtils.equalsIgnoreCase(indexFactory.getType(), dsoType)) {
return indexFactory.getType(); return indexFactory.getType();
} }
} }
throw new DSpaceBadRequestException(dsoType + " is not a valid DSpace Object type"); throw new IllegalArgumentException(dsoType + " is not a valid DSpace Object type");
} }
public void setIndexableFactories(List<IndexFactory> indexableFactories) { public void setIndexableFactories(List<IndexFactory> indexableFactories) {
this.indexableFactories = indexableFactories; this.indexableFactories = indexableFactories;
} }
private String[] convertFilters(Context context, DiscoveryConfiguration discoveryConfiguration,
List<SearchFilter> searchFilters) throws DSpaceBadRequestException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
SearchQueryConverter searchQueryConverter = new SearchQueryConverter();
List<SearchFilter> transformedFilters = searchQueryConverter.convert(searchFilters);
try {
for (SearchFilter searchFilter : CollectionUtils.emptyIfNull(transformedFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new DSpaceBadRequestException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new DSpaceBadRequestException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs, private DiscoverQuery addFaceting(Context context, IndexableObject scope, DiscoverQuery queryArgs,
DiscoveryConfiguration discoveryConfiguration) { DiscoveryConfiguration discoveryConfiguration) {
List<DiscoverySearchFilterFacet> facets = discoveryConfiguration.getSidebarFacets(); List<DiscoverySearchFilterFacet> facets = discoveryConfiguration.getSidebarFacets();
log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets log.debug("facets for configuration " + discoveryConfiguration.getId() + ": " + (facets != null ? facets
.size() : null)); .size() : null));
if (facets != null) { if (facets != null) {
queryArgs.setFacetMinCount(1); queryArgs.setFacetMinCount(1);
@@ -427,4 +390,34 @@ public class DiscoverQueryBuilder implements InitializingBean {
return queryArgs; return queryArgs;
} }
private String[] convertFiltersToString(Context context, DiscoveryConfiguration discoveryConfiguration,
List<QueryBuilderSearchFilter> searchFilters)
throws IllegalArgumentException {
ArrayList<String> filterQueries = new ArrayList<>(CollectionUtils.size(searchFilters));
try {
for (QueryBuilderSearchFilter searchFilter : CollectionUtils.emptyIfNull(searchFilters)) {
DiscoverySearchFilter filter = discoveryConfiguration.getSearchFilter(searchFilter.getName());
if (filter == null) {
throw new IllegalArgumentException(searchFilter.getName() + " is not a valid search filter");
}
DiscoverFilterQuery filterQuery = searchService.toFilterQuery(context,
filter.getIndexFieldName(),
searchFilter.getOperator(),
searchFilter.getValue(),
discoveryConfiguration);
if (filterQuery != null) {
filterQueries.add(filterQuery.getFilterQuery());
}
}
} catch (SQLException e) {
throw new IllegalArgumentException("There was a problem parsing the search filters.", e);
}
return filterQueries.toArray(new String[filterQueries.size()]);
}
} }

View File

@@ -0,0 +1,70 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery.utils.parameter;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
* Representation for a Discovery search filter
*/
public class QueryBuilderSearchFilter {
private String name;
private String operator;
private String value;
public QueryBuilderSearchFilter(final String name, final String operator, final String value) {
this.name = name;
this.operator = operator;
this.value = value;
}
public String getName() {
return name;
}
public String getOperator() {
return operator;
}
public String getValue() {
return value;
}
public String toString() {
return "QueryBuilderSearchFilter{" +
"name='" + name + '\'' +
", operator='" + operator + '\'' +
", value='" + value + '\'' +
'}';
}
public boolean equals(Object object) {
if (object instanceof QueryBuilderSearchFilter) {
QueryBuilderSearchFilter obj = (QueryBuilderSearchFilter) object;
if (!StringUtils.equals(obj.getName(), getName())) {
return false;
}
if (!StringUtils.equals(obj.getOperator(), getOperator())) {
return false;
}
if (!StringUtils.equals(obj.getValue(), getValue())) {
return false;
}
return true;
}
return false;
}
public int hashCode() {
return Objects.hash(name, operator, value);
}
}

View File

@@ -7,6 +7,8 @@
*/ */
package org.dspace.eperson; package org.dspace.eperson;
import static org.dspace.content.Item.ANY;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -23,6 +25,7 @@ import org.apache.commons.codec.DecoderException;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory; import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
@@ -30,6 +33,7 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObjectServiceImpl; import org.dspace.content.DSpaceObjectServiceImpl;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.MetadataField; import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
@@ -43,6 +47,7 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService; import org.dspace.eperson.service.GroupService;
import org.dspace.eperson.service.SubscribeService; import org.dspace.eperson.service.SubscribeService;
import org.dspace.event.Event; import org.dspace.event.Event;
import org.dspace.util.UUIDUtils;
import org.dspace.versioning.Version; import org.dspace.versioning.Version;
import org.dspace.versioning.VersionHistory; import org.dspace.versioning.VersionHistory;
import org.dspace.versioning.dao.VersionDAO; import org.dspace.versioning.dao.VersionDAO;
@@ -96,6 +101,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
protected VersionDAO versionDAO; protected VersionDAO versionDAO;
@Autowired(required = true) @Autowired(required = true)
protected ClaimedTaskService claimedTaskService; protected ClaimedTaskService claimedTaskService;
@Autowired
protected OrcidTokenService orcidTokenService;
protected EPersonServiceImpl() { protected EPersonServiceImpl() {
super(); super();
@@ -379,6 +386,8 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
group.getMembers().remove(ePerson); group.getMembers().remove(ePerson);
} }
orcidTokenService.deleteByEPerson(context, ePerson);
// Remove any subscriptions // Remove any subscriptions
subscribeService.deleteByEPerson(context, ePerson); subscribeService.deleteByEPerson(context, ePerson);
@@ -570,6 +579,15 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
return ePersonDAO.countRows(context); return ePersonDAO.countRows(context);
} }
@Override
public EPerson findByProfileItem(Context context, Item profile) throws SQLException {
List<MetadataValue> owners = itemService.getMetadata(profile, "dspace", "object", "owner", ANY);
if (CollectionUtils.isEmpty(owners)) {
return null;
}
return find(context, UUIDUtils.fromString(owners.get(0).getAuthority()));
}
@Override @Override
public String getName(EPerson dso) { public String getName(EPerson dso) {
return dso.getName(); return dso.getName();

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Set; import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataFieldName; import org.dspace.content.MetadataFieldName;
import org.dspace.content.service.DSpaceObjectLegacySupportService; import org.dspace.content.service.DSpaceObjectLegacySupportService;
import org.dspace.content.service.DSpaceObjectService; import org.dspace.content.service.DSpaceObjectService;
@@ -263,4 +264,16 @@ public interface EPersonService extends DSpaceObjectService<EPerson>, DSpaceObje
* @throws SQLException An exception that provides information on a database access error or other errors. * @throws SQLException An exception that provides information on a database access error or other errors.
*/ */
int countTotal(Context context) throws SQLException; int countTotal(Context context) throws SQLException;
/**
* Find the EPerson related to the given profile item. If the given item is not
* a profile item, null is returned.
*
* @param context The relevant DSpace Context.
* @param profile the profile item to search for
* @return the EPerson, if any
* @throws SQLException An exception that provides information on a database
* access error or other errors.
*/
EPerson findByProfileItem(Context context, Item profile) throws SQLException;
} }

View File

@@ -140,7 +140,7 @@ public class OrcidV3AuthorDataProvider extends AbstractExternalDataProvider {
new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath())); new MetadataValueDTO("person", "identifier", "orcid", null, person.getName().getPath()));
externalDataObject externalDataObject
.addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null, .addMetadata(new MetadataValueDTO("dc", "identifier", "uri", null,
orcidUrl + person.getName().getPath())); orcidUrl + "/" + person.getName().getPath()));
if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) { if (!StringUtils.isBlank(lastName) && !StringUtils.isBlank(firstName)) {
externalDataObject.setDisplayValue(lastName + ", " + firstName); externalDataObject.setDisplayValue(lastName + ", " + firstName);
externalDataObject.setValue(lastName + ", " + firstName); externalDataObject.setValue(lastName + ", " + firstName);

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the Cinii metadatum fields on the DSpace metadatum fields
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class CiniiFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which incoming CiNii metadatum is mapped onto which DSpace metadatum.
     * Note that while the key must be unique, it only matters here for
     * post-processing of the value; the mapped MetadatumContributor has full
     * control over what metadata field is generated.
     *
     * The map is injected by Spring from the bean named "ciniiMetadataFieldMap"
     * via the {@code @Resource} annotation; this override only delegates to the
     * parent implementation.
     *
     * @param metadataFieldMap The map containing the link between retrieved
     *                         metadata and the metadata that will be set on the
     *                         item. (Raw type inherited from the parent signature.)
     */
    @Override
    @Resource(name = "ciniiMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,447 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.cinii;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.dspace.services.ConfigurationService;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Cinii
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class CiniiImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
        implements QuerySource {

    private static final Logger log = LogManager.getLogger();

    // Timeout (in milliseconds) applied to every HTTP request against the CiNii API.
    private static final int TIMEOUT_MS = 1000;

    // Base URL of the CiNii RDF record endpoint (records are fetched as <url><id>.rdf).
    private String url;

    // URL of the CiNii OpenSearch endpoint used for queries and counts.
    private String urlSearch;

    @Autowired
    private LiveImportClient liveImportClient;

    @Autowired
    private ConfigurationService configurationService;

    @Override
    public String getImportSource() {
        return "cinii";
    }

    @Override
    public void init() throws Exception {}

    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
    }

    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isNotEmpty(records) ? records.get(0) : null;
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return retry(new FindMatchingRecordCallable(query));
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for Cinii");
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getUrlSearch() {
        return urlSearch;
    }

    public void setUrlSearch(String urlSearch) {
        this.urlSearch = urlSearch;
    }

    /**
     * This class is a Callable implementation to get CiNii entries based on
     * query object.
     *
     * This Callable use as query value the string queryString passed to constructor.
     * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("count", maxResult);
            query.addParameter("start", start);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> records = new LinkedList<>();
            Integer count = query.getParameterAsClass("count", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String queryString = query.getParameterAsClass("query", String.class);
            String appId = configurationService.getProperty("cinii.appid");
            // First resolve the matching CiNii ids, then fetch each record individually.
            List<String> ids = getCiniiIds(appId, count, null, null, null, start, queryString);
            if (CollectionUtils.isNotEmpty(ids)) {
                for (String id : ids) {
                    List<ImportRecord> tmp = search(id, appId);
                    if (CollectionUtils.isNotEmpty(tmp)) {
                        // Tag every record with the CiNii id it was fetched by.
                        tmp.forEach(x -> x.addValue(createIdentifier(id)));
                    }
                    records.addAll(tmp);
                }
            }
            return records;
        }
    }

    /**
     * This class is a Callable implementation to get an CiNii entry using CiNii ID
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByIdCallable(Query query) {
            this.query = query;
        }

        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            String appId = configurationService.getProperty("cinii.appid");
            String id = query.getParameterAsClass("id", String.class);
            List<ImportRecord> importRecord = search(id, appId);
            if (CollectionUtils.isNotEmpty(importRecord)) {
                importRecord.forEach(x -> x.addValue(createIdentifier(id)));
            }
            return importRecord;
        }
    }

    /**
     * This class is a Callable implementation to search CiNii entries
     * using author, title and year.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private FindMatchingRecordCallable(Query q) {
            query = q;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> records = new LinkedList<>();
            String title = query.getParameterAsClass("title", String.class);
            String author = query.getParameterAsClass("author", String.class);
            Integer year = query.getParameterAsClass("year", Integer.class);
            Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String appId = configurationService.getProperty("cinii.appid");
            List<String> ids = getCiniiIds(appId, maxResult, author, title, year, start, null);
            if (CollectionUtils.isNotEmpty(ids)) {
                for (String id : ids) {
                    List<ImportRecord> importRecords = search(id, appId);
                    if (CollectionUtils.isNotEmpty(importRecords)) {
                        importRecords.forEach(x -> x.addValue(createIdentifier(id)));
                    }
                    records.addAll(importRecords);
                }
            }
            return records;
        }
    }

    /**
     * This class is a Callable implementation to count the number
     * of entries for an CiNii query.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {

        private Query query;

        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private CountByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public Integer call() throws Exception {
            String appId = configurationService.getProperty("cinii.appid");
            String queryString = query.getParameterAsClass("query", String.class);
            return countCiniiElement(appId, null, null, null, null, null, queryString);
        }
    }

    /**
     * Get metadata by searching CiNii RDF API with CiNii NAID
     *
     * @param id    CiNii NAID to search by
     * @param appId registered application identifier for the API
     * @return record metadata
     * @throws IOException   A general class of exceptions produced by failed or interrupted I/O operations.
     * @throws HttpException Represents a XML/HTTP fault and provides access to the HTTP status code.
     */
    protected List<ImportRecord> search(String id, String appId)
            throws IOException, HttpException {
        try {
            List<ImportRecord> records = new LinkedList<>();
            URIBuilder uriBuilder = new URIBuilder(this.url + id + ".rdf?appid=" + appId);
            Map<String, Map<String, String>> params = new HashMap<>();
            String response = liveImportClient.executeHttpGetRequest(TIMEOUT_MS, uriBuilder.toString(), params);
            List<Element> elements = splitToRecords(response);
            for (Element record : elements) {
                records.add(transformSourceRecords(record));
            }
            return records;
        } catch (URISyntaxException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Parses the XML response and returns the child elements of its root,
     * each one representing a single record.
     */
    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
            return root.getChildren();
        } catch (JDOMException | IOException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Returns a list of uri links (for example: https://cir.nii.ac.jp/crid/123456789)
     * to the searched CiNii articles
     *
     * @param appId     Application ID
     * @param maxResult The number of search results per page
     * @param author    Author name
     * @param title     Article name
     * @param year      Year of publication
     * @param start     Start number for the acquired search result list
     * @param query     Keyword to be searched
     */
    private List<String> getCiniiIds(String appId, Integer maxResult, String author, String title,
            Integer year, Integer start, String query) {
        try {
            List<String> ids = new ArrayList<>();
            URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
            uriBuilder.addParameter("format", "rss");
            if (StringUtils.isNotBlank(appId)) {
                uriBuilder.addParameter("appid", appId);
            }
            if (Objects.nonNull(maxResult) && maxResult != 0) {
                uriBuilder.addParameter("count", maxResult.toString());
            }
            if (Objects.nonNull(start)) {
                uriBuilder.addParameter("start", start.toString());
            }
            if (StringUtils.isNotBlank(title)) {
                uriBuilder.addParameter("title", title);
            }
            if (StringUtils.isNotBlank(author)) {
                uriBuilder.addParameter("author", author);
            }
            if (StringUtils.isNotBlank(query)) {
                uriBuilder.addParameter("q", query);
            }
            if (Objects.nonNull(year) && year != -1 && year != 0) {
                // CiNii takes a range; a single year is expressed as from == to.
                uriBuilder.addParameter("year_from", String.valueOf(year));
                uriBuilder.addParameter("year_to", String.valueOf(year));
            }
            Map<String, Map<String, String>> params = new HashMap<>();
            String response = liveImportClient.executeHttpGetRequest(TIMEOUT_MS, uriBuilder.toString(), params);
            // Record ids are obtained by stripping the configured url prefix from each
            // rdf:about value. NOTE(review): assumes every rdf:about value starts with
            // this.url; the -1 / +1 offsets appear intended to tolerate a trailing '/'
            // on this.url -- confirm against the live API response.
            int prefixLength = this.url.length() - 1;
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays.asList(
                Namespace.getNamespace("ns", "http://purl.org/rss/1.0/"),
                Namespace.getNamespace("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"));
            XPathExpression<Attribute> xpath = XPathFactory.instance().compile("//ns:item/@rdf:about",
                Filters.attribute(), null, namespaces);
            List<Attribute> recordsList = xpath.evaluate(root);
            for (Attribute item : recordsList) {
                String value = item.getValue();
                if (value.length() > prefixLength) {
                    ids.add(value.substring(prefixLength + 1));
                }
            }
            return ids;
        } catch (JDOMException | IOException | URISyntaxException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Returns the total number of CiNii articles returned by a specific query
     *
     * @param appId     Application ID
     * @param maxResult The number of search results per page
     * @param author    Author name
     * @param title     Article name
     * @param year      Year of publication
     * @param start     Start number for the acquired search result list
     * @param query     Keyword to be searched
     */
    private Integer countCiniiElement(String appId, Integer maxResult, String author, String title,
            Integer year, Integer start, String query) {
        try {
            URIBuilder uriBuilder = new URIBuilder(this.urlSearch);
            uriBuilder.addParameter("format", "rss");
            uriBuilder.addParameter("appid", appId);
            if (Objects.nonNull(maxResult) && maxResult != 0) {
                uriBuilder.addParameter("count", maxResult.toString());
            }
            if (Objects.nonNull(start)) {
                uriBuilder.addParameter("start", start.toString());
            }
            if (StringUtils.isNotBlank(title)) {
                uriBuilder.addParameter("title", title);
            }
            if (StringUtils.isNotBlank(author)) {
                uriBuilder.addParameter("author", author);
            }
            if (StringUtils.isNotBlank(query)) {
                uriBuilder.addParameter("q", query);
            }
            if (Objects.nonNull(year) && year != -1 && year != 0) {
                uriBuilder.addParameter("year_from", String.valueOf(year));
                uriBuilder.addParameter("year_to", String.valueOf(year));
            }
            Map<String, Map<String, String>> params = new HashMap<>();
            String response = liveImportClient.executeHttpGetRequest(TIMEOUT_MS, uriBuilder.toString(), params);
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays
                .asList(Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
            XPathExpression<Element> xpath = XPathFactory.instance().compile("//opensearch:totalResults",
                Filters.element(), null, namespaces);
            List<Element> nodes = xpath.evaluate(root);
            if (nodes != null && !nodes.isEmpty()) {
                // nodes is already List<Element>, so no cast is needed here.
                return Integer.parseInt(nodes.get(0).getText());
            }
            return 0;
        } catch (JDOMException | IOException | URISyntaxException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Builds a dc.identifier.other metadatum carrying the CiNii record id,
     * which is attached to every imported record.
     */
    private MetadatumDTO createIdentifier(String id) {
        MetadatumDTO metadatumDTO = new MetadatumDTO();
        metadatumDTO.setSchema("dc");
        metadatumDTO.setElement("identifier");
        metadatumDTO.setQualifier("other");
        metadatumDTO.setValue(id);
        return metadatumDTO;
    }
}

View File

@@ -74,7 +74,8 @@ public class LiveImportClientImpl implements LiveImportClient {
HttpResponse httpResponse = httpClient.execute(method); HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessfull(httpResponse)) { if (isNotSuccessfull(httpResponse)) {
throw new RuntimeException(); throw new RuntimeException("The request failed with: " + getStatusCode(httpResponse) + " code, reason= "
+ httpResponse.getStatusLine().getReasonPhrase());
} }
InputStream inputStream = httpResponse.getEntity().getContent(); InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset()); return IOUtils.toString(inputStream, Charset.defaultCharset());

View File

@@ -0,0 +1,173 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
* Scopus specific implementation of {@link MetadataContributor}
* Responsible for generating the ScopusID, orcid, author name and affiliationID
* from the retrieved item.
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
public class AuthorMetadataContributor extends SimpleXpathMetadatumContributor {
private static final Namespace NAMESPACE = Namespace.getNamespace("http://www.w3.org/2005/Atom");
private MetadataFieldConfig orcid;
private MetadataFieldConfig scopusId;
private MetadataFieldConfig authname;
private MetadataFieldConfig affiliation;
private Map<String, String> affId2affName = new HashMap<String, String>();
/**
* Retrieve the metadata associated with the given object.
* Depending on the retrieved node (using the query),
* different types of values will be added to the MetadatumDTO list.
*
* @param element A class to retrieve metadata from.
* @return A collection of import records. Only the ScopusID, orcid, author name and affiliation
* of the found records may be put in the record.
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<MetadatumDTO> metadatums = null;
fillAffillation(element);
try {
List<Element> nodes = element.getChildren("author", NAMESPACE);
for (Element el : nodes) {
metadatums = getMetadataOfAuthors(el);
if (Objects.nonNull(metadatums)) {
for (MetadatumDTO metadatum : metadatums) {
values.add(metadatum);
}
}
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
return values;
}
/**
* Retrieve the the ScopusID, orcid, author name and affiliationID
* metadata associated with the given element object.
* If the value retrieved from the element is empty
* it is set PLACEHOLDER_PARENT_METADATA_VALUE
*
* @param element A class to retrieve metadata from
* @throws JaxenException If Xpath evaluation failed
*/
private List<MetadatumDTO> getMetadataOfAuthors(Element element) throws JaxenException {
List<MetadatumDTO> metadatums = new ArrayList<MetadatumDTO>();
Element authname = element.getChild("authname", NAMESPACE);
Element scopusId = element.getChild("authid", NAMESPACE);
Element orcid = element.getChild("orcid", NAMESPACE);
Element afid = element.getChild("afid", NAMESPACE);
addMetadatum(metadatums, getMetadata(getElementValue(authname), this.authname));
addMetadatum(metadatums, getMetadata(getElementValue(scopusId), this.scopusId));
addMetadatum(metadatums, getMetadata(getElementValue(orcid), this.orcid));
addMetadatum(metadatums, getMetadata(StringUtils.isNotBlank(afid.getValue())
? this.affId2affName.get(afid.getValue()) : null, this.affiliation));
return metadatums;
}
private void addMetadatum(List<MetadatumDTO> list, MetadatumDTO metadatum) {
if (Objects.nonNull(metadatum)) {
list.add(metadatum);
}
}
private String getElementValue(Element element) {
if (Objects.nonNull(element)) {
return element.getValue();
}
return StringUtils.EMPTY;
}
private MetadatumDTO getMetadata(String value, MetadataFieldConfig metadaConfig) {
if (StringUtils.isBlank(value)) {
return null;
}
MetadatumDTO metadata = new MetadatumDTO();
metadata.setElement(metadaConfig.getElement());
metadata.setQualifier(metadaConfig.getQualifier());
metadata.setSchema(metadaConfig.getSchema());
metadata.setValue(value);
return metadata;
}
private void fillAffillation(Element element) {
try {
List<Element> nodes = element.getChildren("affiliation", NAMESPACE);
for (Element el : nodes) {
fillAffiliation2Name(el);
}
} catch (JaxenException e) {
throw new RuntimeException(e);
}
}
private void fillAffiliation2Name(Element element) throws JaxenException {
Element affilationName = element.getChild("affilname", NAMESPACE);
Element affilationId = element.getChild("afid", NAMESPACE);
if (Objects.nonNull(affilationId) && Objects.nonNull(affilationName)) {
affId2affName.put(affilationId.getValue(), affilationName.getValue());
}
}
public MetadataFieldConfig getAuthname() {
return authname;
}
public void setAuthname(MetadataFieldConfig authname) {
this.authname = authname;
}
public MetadataFieldConfig getOrcid() {
return orcid;
}
public void setOrcid(MetadataFieldConfig orcid) {
this.orcid = orcid;
}
public MetadataFieldConfig getScopusId() {
return scopusId;
}
public void setScopusId(MetadataFieldConfig scopusId) {
this.scopusId = scopusId;
}
public MetadataFieldConfig getAffiliation() {
return affiliation;
}
public void setAffiliation(MetadataFieldConfig affiliation) {
this.affiliation = affiliation;
}
}

View File

@@ -0,0 +1,110 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
 * Scopus specific implementation of {@link MetadataContributor}
 * Responsible for generating the Scopus startPage and endPage from the retrieved item.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
 */
public class PageRangeXPathMetadataContributor extends SimpleXpathMetadatumContributor {

    private MetadataFieldConfig startPageMetadata;
    private MetadataFieldConfig endPageMetadata;

    /**
     * Retrieve the metadata associated with the given Element object.
     * Each node matching the configured query is parsed as a page range
     * ("start-end"); the resulting StartPage and EndPage values are added
     * to the MetadatumDTO list.
     *
     * @param el A class to retrieve metadata from.
     * @return A collection of import records. Only the StartPage and EndPage
     *         of the found records may be put in the record.
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element el) {
        List<MetadatumDTO> values = new LinkedList<>();
        for (String namespaceUri : prefixToNamespaceMapping.keySet()) {
            for (Element child : el.getChildren(query, Namespace.getNamespace(namespaceUri))) {
                List<MetadatumDTO> pageMetadata = parsePageRange(child.getValue());
                if (Objects.nonNull(pageMetadata)) {
                    values.addAll(pageMetadata);
                }
            }
        }
        return values;
    }

    /**
     * Split a raw page-range string on "-". Two tokens yield start and end
     * pages; a single token yields only the start page; a blank value yields null.
     */
    private List<MetadatumDTO> parsePageRange(String value) {
        if (StringUtils.isBlank(value)) {
            return null;
        }
        List<MetadatumDTO> pageMetadata = new ArrayList<MetadatumDTO>();
        String[] range = value.split("-");
        if (range.length == 2) {
            pageMetadata.add(buildPage(range[0], startPageMetadata));
            pageMetadata.add(buildPage(range[1], endPageMetadata));
        } else if (range.length != 0) {
            pageMetadata.add(buildPage(range[0], startPageMetadata));
        }
        return pageMetadata;
    }

    // Build a single page metadatum from the given value and field configuration.
    private MetadatumDTO buildPage(String value, MetadataFieldConfig fieldConfig) {
        MetadatumDTO page = new MetadatumDTO();
        page.setValue(value);
        page.setElement(fieldConfig.getElement());
        page.setQualifier(fieldConfig.getQualifier());
        page.setSchema(fieldConfig.getSchema());
        return page;
    }

    public MetadataFieldConfig getStartPageMetadata() {
        return startPageMetadata;
    }

    public void setStartPageMetadata(MetadataFieldConfig startPageMetadata) {
        this.startPageMetadata = startPageMetadata;
    }

    public MetadataFieldConfig getEndPageMetadata() {
        return endPageMetadata;
    }

    public void setEndPageMetadata(MetadataFieldConfig endPageMetadata) {
        this.endPageMetadata = endPageMetadata;
    }
}

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
/**
 * This contributor replace specific character in the metadata value.
 * It is useful for some provider (e.g. Scopus) which use containing "/" character.
 * Actually, "/" will never encode by framework in URL building. In the same ways, if we
 * encode "/" -> %2F, it will be encoded by framework and become %252F.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science.com)
 */
public class ReplaceCharacterXPathMetadataContributor extends SimpleXpathMetadatumContributor {

    private char characterToBeReplaced;
    private char characterToReplaceWith;

    /**
     * Retrieve the metadata for each node matching the configured query,
     * replacing {@code characterToBeReplaced} with {@code characterToReplaceWith}
     * in every value.
     *
     * @param element the element to retrieve metadata from
     * @return the collection of extracted (and character-replaced) metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            for (Element el : element.getChildren(query, Namespace.getNamespace(ns))) {
                MetadatumDTO metadatum = getMetadatum(field, el.getValue());
                // FIX: when no field is configured getMetadatum returns null, which
                // used to be added to the result list as a null entry.
                if (Objects.nonNull(metadatum)) {
                    values.add(metadatum);
                }
            }
        }
        return values;
    }

    /**
     * Build a MetadatumDTO with the replaced value, or return null when
     * no field configuration is available.
     */
    private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
        if (Objects.isNull(field)) {
            return null;
        }
        MetadatumDTO dcValue = new MetadatumDTO();
        dcValue.setValue(value == null ? null : value.replace(characterToBeReplaced, characterToReplaceWith));
        dcValue.setElement(field.getElement());
        dcValue.setQualifier(field.getQualifier());
        dcValue.setSchema(field.getSchema());
        return dcValue;
    }

    // Configured from Spring as an int (character code point), stored as a char.
    public void setCharacterToBeReplaced(int characterToBeReplaced) {
        this.characterToBeReplaced = (char)characterToBeReplaced;
    }

    // Configured from Spring as an int (character code point), stored as a char.
    public void setCharacterToReplaceWith(int characterToReplaceWith) {
        this.characterToReplaceWith = (char)characterToReplaceWith;
    }
}

View File

@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * This contributor is able to concat multi value.
 * Given a certain path, if it contains several nodes,
 * the values of nodes will be concatenated into a single one.
 * The concrete example we can see in the file wos-responce.xml in the <abstract_text> node,
 * which may contain several <p> paragraphs,
 * this Contributor allows concatenating all <p> paragraphs. to obtain a single one.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class SimpleConcatContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    /**
     * Evaluate the configured query and concatenate the text of every matched
     * element into a single metadatum; non-element nodes are only logged.
     *
     * @param t the element to retrieve metadata from
     * @return a collection holding at most one concatenated metadatum
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String namespaceUri : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(namespaceUri), namespaceUri));
        }
        XPathExpression<Object> expression = XPathFactory.instance()
                .compile(query, Filters.fpassthrough(), null, namespaces);
        StringBuilder concatenated = new StringBuilder();
        for (Object node : expression.evaluate(t)) {
            if (!(node instanceof Element)) {
                log.warn("node of type: " + node.getClass());
                continue;
            }
            String text = ((Element) node).getText();
            if (StringUtils.isNotBlank(text)) {
                concatenated.append(text);
            }
        }
        if (StringUtils.isNotBlank(concatenated.toString())) {
            values.add(metadataFieldMapping.toDCValue(field, concatenated.toString()));
        }
        return values;
    }
}

View File

@@ -0,0 +1,75 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * Web of Science specific implementation of {@link MetadataContributor}.
 * This contributor can perform research on multi-paths.
 * For example, to populate the subject metadata, in the Web of Science response
 * the values are contained in different paths,
 * so this Contributor allows you to collect the values by configuring the paths in the paths list.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class SimpleMultiplePathContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    private List<String> paths;

    public SimpleMultiplePathContributor() {}

    public SimpleMultiplePathContributor(List<String> paths) {
        this.paths = paths;
    }

    /**
     * Evaluate every configured path against the given element and collect
     * the text of each matched element; non-element nodes are only logged.
     *
     * @param t the element to retrieve metadata from
     * @return the collected metadata from all configured paths
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        // FIX: the namespace list does not depend on the path, so build it once
        // instead of rebuilding it on every iteration of the paths loop.
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        for (String path : this.paths) {
            XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
                    namespaces);
            for (Object node : xpath.evaluate(t)) {
                if (node instanceof Element) {
                    values.add(metadataFieldMapping.toDCValue(field, ((Element) node).getText()));
                } else {
                    log.warn("node of type: " + node.getClass());
                }
            }
        }
        return values;
    }

    public List<String> getPaths() {
        return paths;
    }

    public void setPaths(List<String> paths) {
        this.paths = paths;
    }
}

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * This contributor checks for each node returned for the supplied path
 * if node contains supplied attribute - the value of the current node is taken if exist.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot com)
 */
public class SimpleXpathMetadatumAndAttributeContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    private String attribute;

    /**
     * Evaluate the configured query and, for each matched element, extract the
     * value of the configured attribute when present; non-element nodes are
     * only logged.
     *
     * @param t the element to retrieve metadata from
     * @return the collected attribute-value metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String namespaceUri : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(namespaceUri), namespaceUri));
        }
        XPathExpression<Object> expression = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                namespaces);
        for (Object node : expression.evaluate(t)) {
            if (!(node instanceof Element)) {
                log.warn("node of type: " + node.getClass());
                continue;
            }
            String attributeValue = ((Element) node).getAttributeValue(this.attribute);
            if (StringUtils.isNotBlank(attributeValue)) {
                values.add(metadataFieldMapping.toDCValue(this.field, attributeValue));
            }
        }
        return values;
    }

    public String getAttribute() {
        return attribute;
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }
}

View File

@@ -34,10 +34,10 @@ import org.springframework.beans.factory.annotation.Autowired;
*/ */
public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> { public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
protected MetadataFieldConfig field;
/** /**
* Return prefixToNamespaceMapping * Return prefixToNamespaceMapping
* *
@@ -171,4 +171,5 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
} }
return values; return values;
} }
}
}

View File

@@ -0,0 +1,160 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * Web Of Science specific implementation of {@link MetadataContributor}
 * This contributor checks for each node returned for the given path if the node contains "this.attribute"
 * and then checks if the attribute value is one of the values configured
 * in the "this.attributeValue2metadata" map, if the value of the current known is taken.
 * If "this.firstChild" is true, it takes the value of the child of the known.
 * The mapping and configuration of this class can be found in the following wos-integration.xml file.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosAttribute2ValueContributor implements MetadataContributor<Element> {

    private final static Logger log = LogManager.getLogger();

    private String query;

    private String attribute;

    // When true, the value is taken from the child element named "childName"
    // instead of the matched element itself.
    private boolean firstChild;

    private String childName;

    private Map<String, String> prefixToNamespaceMapping;

    private Map<String, MetadataFieldConfig> attributeValue2metadata;

    private MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;

    public WosAttribute2ValueContributor() {}

    public WosAttribute2ValueContributor(String query,
                                         Map<String, String> prefixToNamespaceMapping,
                                         Map<String, MetadataFieldConfig> attributeValue2metadata) {
        this.query = query;
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
        this.attributeValue2metadata = attributeValue2metadata;
    }

    /**
     * Evaluate the configured query and map each matched element's attribute
     * value to a metadata field via {@code attributeValue2metadata};
     * non-element nodes are only logged.
     *
     * @param t the element to retrieve metadata from
     * @return the collected metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Object> xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                namespaces);
        for (Object el : xpath.evaluate(t)) {
            if (el instanceof Element) {
                Element element = (Element) el;
                String attributeValue = element.getAttributeValue(this.attribute);
                setField(attributeValue, element, values);
            } else {
                log.warn("node of type: " + el.getClass());
            }
        }
        return values;
    }

    /**
     * Resolve the field configured for the given attribute value and add the
     * corresponding metadatum.
     */
    private void setField(String attributeValue, Element el, List<MetadatumDTO> values) {
        // Direct map lookup instead of scanning the whole key set (map keys are unique).
        MetadataFieldConfig fieldConfig = attributeValue2metadata.get(attributeValue);
        if (fieldConfig == null) {
            return;
        }
        if (this.firstChild) {
            Element child = el.getChild(this.childName);
            // FIX: a missing child element used to trigger an unconditional
            // getValue() call, throwing a NullPointerException.
            if (child != null) {
                values.add(metadataFieldMapping.toDCValue(fieldConfig, child.getValue()));
            } else {
                log.warn("Missing child element '" + this.childName + "' for attribute value: " + attributeValue);
            }
        } else {
            values.add(metadataFieldMapping.toDCValue(fieldConfig, el.getText()));
        }
    }

    public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
        return metadataFieldMapping;
    }

    public void setMetadataFieldMapping(
        MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
        this.metadataFieldMapping = metadataFieldMapping;
    }

    @Resource(name = "isiFullprefixMapping")
    public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
        this.prefixToNamespaceMapping = prefixToNamespaceMapping;
    }

    public Map<String, String> getPrefixToNamespaceMapping() {
        return prefixToNamespaceMapping;
    }

    public String getAttribute() {
        return attribute;
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }

    public Map<String, MetadataFieldConfig> getAttributeValue2metadata() {
        return attributeValue2metadata;
    }

    public void setAttributeValue2metadata(Map<String, MetadataFieldConfig> attributeValue2metadata) {
        this.attributeValue2metadata = attributeValue2metadata;
    }

    public String getQuery() {
        return query;
    }

    public void setQuery(String query) {
        this.query = query;
    }

    public boolean isFirstChild() {
        return firstChild;
    }

    public void setFirstChild(boolean firstChild) {
        this.firstChild = firstChild;
    }

    public String getChildName() {
        return childName;
    }

    public void setChildName(String childName) {
        this.childName = childName;
    }
}

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * This contributor can retrieve the identifiers
 * configured in "this.identifier2field" from the Web of Science response.
 * The mapping and configuration of this class can be found in the following wos-integration.xml file.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosIdentifierContributor extends SimpleXpathMetadatumContributor {

    // Maps an identifier "type" attribute value (e.g. "doi") to the target metadata field.
    protected Map<String, MetadataFieldConfig> identifier2field;

    /**
     * Evaluate the configured query and, for each matched identifier element,
     * add a metadatum when its "type" attribute is configured in
     * {@code identifier2field}.
     *
     * @param element the element to retrieve metadata from
     * @return the collected identifier metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element element) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<>();
        for (String ns : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
        }
        XPathExpression<Element> xpath =
            XPathFactory.instance().compile(query, Filters.element(), null, namespaces);
        for (Element el : xpath.evaluate(element)) {
            String type = el.getAttributeValue("type");
            addIdentifier(type, el, values);
        }
        return values;
    }

    /**
     * Add the identifier metadatum for the given type when configured.
     * FIX: renamed from the misspelled "setIdentyfier" (private, no external
     * callers) and replaced the O(n) key-set scan with a direct map lookup.
     */
    private void addIdentifier(String type, Element el, List<MetadatumDTO> values) {
        MetadataFieldConfig fieldConfig = identifier2field.get(type);
        if (fieldConfig != null) {
            values.add(metadataFieldMapping.toDCValue(fieldConfig, el.getAttributeValue("value")));
        }
    }

    public Map<String, MetadataFieldConfig> getIdentifier2field() {
        return identifier2field;
    }

    public void setIdentifier2field(Map<String, MetadataFieldConfig> identifier2field) {
        this.identifier2field = identifier2field;
    }
}

View File

@@ -0,0 +1,68 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
 * Web Of Science specific implementation of {@link MetadataContributor}
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WosIdentifierRidContributor extends SimpleXpathMetadatumContributor {

    private final static Logger log = LogManager.getLogger();

    /**
     * Evaluate the configured query and, for every matched element's "name"
     * child with role "researcher_id", add its "r_id" attribute value as a
     * metadatum; non-element nodes are only logged.
     *
     * @param t the element to retrieve metadata from
     * @return the collected researcher-id metadata
     */
    @Override
    public Collection<MetadatumDTO> contributeMetadata(Element t) {
        List<MetadatumDTO> values = new LinkedList<>();
        List<Namespace> namespaces = new ArrayList<Namespace>();
        for (String prefix : prefixToNamespaceMapping.keySet()) {
            namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(prefix), prefix));
        }
        XPathExpression<Object> expression = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,
                namespaces);
        for (Object node : expression.evaluate(t)) {
            if (!(node instanceof Element)) {
                log.warn("node of type: " + node.getClass());
                continue;
            }
            Element nameElement = ((Element) node).getChild("name");
            if (Objects.nonNull(nameElement)) {
                appendResearcherId(nameElement.getAttributeValue("role"), nameElement, values);
            }
        }
        return values;
    }

    // Add the researcher id only for name elements whose role is "researcher_id"
    // and whose r_id attribute is non-blank.
    private void appendResearcherId(String role, Element el, List<MetadatumDTO> values) {
        if (StringUtils.equals("researcher_id", role)) {
            String researcherId = el.getAttributeValue("r_id");
            if (StringUtils.isNotBlank(researcherId)) {
                values.add(metadataFieldMapping.toDCValue(this.field, researcherId));
            }
        }
    }
}

View File

@@ -14,23 +14,23 @@ import java.io.InputStreamReader;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader; import java.io.StringReader;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.google.common.io.CharStreams; import com.google.common.io.CharStreams;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord; import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query; import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.FileMultipleOccurencesException; import org.dspace.importer.external.exception.FileMultipleOccurencesException;
import org.dspace.importer.external.exception.FileSourceException; import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.exception.MetadataSourceException; import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService; import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.FileSource; import org.dspace.importer.external.service.components.FileSource;
import org.dspace.importer.external.service.components.QuerySource; import org.dspace.importer.external.service.components.QuerySource;
@@ -41,6 +41,7 @@ import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder; import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression; import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory; import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/** /**
* Implements a data source for querying PubMed Central * Implements a data source for querying PubMed Central
@@ -51,13 +52,16 @@ import org.jdom2.xpath.XPathFactory;
public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element> public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource, FileSource { implements QuerySource, FileSource {
private String baseAddress; private String urlFetch;
private String urlSearch;
// it is protected so that subclass can mock it for testing private int attempt = 3;
protected WebTarget pubmedWebTarget;
private List<String> supportedExtensions; private List<String> supportedExtensions;
@Autowired
private LiveImportClient liveImportClient;
/** /**
* Set the file extensions supported by this metadata service * Set the file extensions supported by this metadata service
* *
@@ -187,29 +191,7 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
* @throws Exception on generic exception * @throws Exception on generic exception
*/ */
@Override @Override
public void init() throws Exception { public void init() throws Exception {}
Client client = ClientBuilder.newClient();
WebTarget webTarget = client.target(baseAddress);
pubmedWebTarget = webTarget.queryParam("db", "pubmed");
}
/**
* Return the baseAddress set to this object
*
* @return The String object that represents the baseAddress of this object
*/
public String getBaseAddress() {
return baseAddress;
}
/**
* Set the baseAddress to this object
*
* @param baseAddress The String object that represents the baseAddress of this object
*/
public void setBaseAddress(String baseAddress) {
this.baseAddress = baseAddress;
}
private class GetNbRecords implements Callable<Integer> { private class GetNbRecords implements Callable<Integer> {
@@ -226,24 +208,27 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Integer call() throws Exception { public Integer call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("query", String.class)); uriBuilder.addParameter("db", "pubmed");
uriBuilder.addParameter("term", query.getParameterAsClass("query", String.class));
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); return Integer.parseInt(getSingleElementValue(response, "Count"));
Response response = invocationBuilder.get();
String responseString = response.readEntity(String.class);
String count = getSingleElementValue(responseString, "Count");
return Integer.parseInt(count);
} }
} }
private String getSingleElementValue(String src, String elementName) { private String getSingleElementValue(String src, String elementName) {
String value = null; String value = null;
@@ -286,41 +271,61 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
Integer start = query.getParameterAsClass("start", Integer.class); Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class); Integer count = query.getParameterAsClass("count", Integer.class);
if (count == null || count < 0) { if (Objects.isNull(count) || count < 0) {
count = 10; count = 10;
} }
if (start == null || start < 0) { if (Objects.isNull(start) || start < 0) {
start = 0; start = 0;
} }
List<ImportRecord> records = new LinkedList<ImportRecord>(); List<ImportRecord> records = new LinkedList<ImportRecord>();
WebTarget getRecordIdsTarget = pubmedWebTarget.queryParam("term", queryString); URIBuilder uriBuilder = new URIBuilder(urlSearch);
getRecordIdsTarget = getRecordIdsTarget.queryParam("retstart", start); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget.queryParam("retmax", count); uriBuilder.addParameter("retstart", start.toString());
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("retmax", count.toString());
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi"); uriBuilder.addParameter("usehistory", "y");
uriBuilder.addParameter("term", queryString);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
Response response = invocationBuilder.get(); String queryKey = getSingleElementValue(response, "QueryKey");
String responseString = response.readEntity(String.class); String webEnv = getSingleElementValue(response, "WebEnv");
String queryKey = getSingleElementValue(responseString, "QueryKey"); URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
String webEnv = getSingleElementValue(responseString, "WebEnv"); uriBuilder2.addParameter("db", "pubmed");
uriBuilder2.addParameter("retstart", start.toString());
uriBuilder2.addParameter("retmax", count.toString());
uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
uriBuilder2.addParameter("retmode", "xml");
Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); if (StringUtils.isBlank(response2)) {
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); throw new RuntimeException("After " + attempt
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); + " attempts to contact the PubMed service, a correct answer could not be received."
getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); + " The request was made with this URL:" + uriBuilder2.toString());
getRecordsTarget = getRecordsTarget.queryParam("retmax", count); }
getRecordsTarget = getRecordsTarget.queryParam("retstart", start);
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); List<Element> elements = splitToRecords(response2);
response = invocationBuilder.get();
List<Element> elements = splitToRecords(response.readEntity(String.class));
for (Element record : elements) { for (Element record : elements) {
records.add(transformSourceRecords(record)); records.add(transformSourceRecords(record));
@@ -361,23 +366,29 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public ImportRecord call() throws Exception { public ImportRecord call() throws Exception {
String id = query.getParameterAsClass("id", String.class);
WebTarget getRecordTarget = pubmedWebTarget.queryParam("id", id); URIBuilder uriBuilder = new URIBuilder(urlFetch);
getRecordTarget = getRecordTarget.queryParam("retmode", "xml"); uriBuilder.addParameter("db", "pubmed");
getRecordTarget = getRecordTarget.path("efetch.fcgi"); uriBuilder.addParameter("retmode", "xml");
uriBuilder.addParameter("id", query.getParameterAsClass("id", String.class));
Invocation.Builder invocationBuilder = getRecordTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
Response response = invocationBuilder.get(); int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
List<Element> elements = splitToRecords(response.readEntity(String.class)); countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
if (elements.isEmpty()) {
return null;
} }
return transformSourceRecords(elements.get(0)); if (StringUtils.isBlank(response)) {
throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
List<Element> elements = splitToRecords(response);
return elements.isEmpty() ? null : transformSourceRecords(elements.get(0));
} }
} }
@@ -396,40 +407,57 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
@Override @Override
public Collection<ImportRecord> call() throws Exception { public Collection<ImportRecord> call() throws Exception {
WebTarget getRecordIdsTarget = pubmedWebTarget URIBuilder uriBuilder = new URIBuilder(urlSearch);
.queryParam("term", query.getParameterAsClass("term", String.class)); uriBuilder.addParameter("db", "pubmed");
getRecordIdsTarget = getRecordIdsTarget uriBuilder.addParameter("usehistory", "y");
.queryParam("field", query.getParameterAsClass("field", String.class)); uriBuilder.addParameter("term", query.getParameterAsClass("term", String.class));
getRecordIdsTarget = getRecordIdsTarget.queryParam("usehistory", "y"); uriBuilder.addParameter("field", query.getParameterAsClass("field", String.class));
getRecordIdsTarget = getRecordIdsTarget.path("esearch.fcgi");
Invocation.Builder invocationBuilder = getRecordIdsTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = StringUtils.EMPTY;
int countAttempt = 0;
while (StringUtils.isBlank(response) && countAttempt <= attempt) {
countAttempt++;
response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
Response response = invocationBuilder.get(); if (StringUtils.isBlank(response)) {
String responseString = response.readEntity(String.class); throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder.toString());
}
String queryKey = getSingleElementValue(responseString, "QueryKey"); String webEnv = getSingleElementValue(response, "WebEnv");
String webEnv = getSingleElementValue(responseString, "WebEnv"); String queryKey = getSingleElementValue(response, "QueryKey");
WebTarget getRecordsTarget = pubmedWebTarget.queryParam("WebEnv", webEnv); URIBuilder uriBuilder2 = new URIBuilder(urlFetch);
getRecordsTarget = getRecordsTarget.queryParam("query_key", queryKey); uriBuilder2.addParameter("db", "pubmed");
getRecordsTarget = getRecordsTarget.queryParam("retmode", "xml"); uriBuilder2.addParameter("retmode", "xml");
getRecordsTarget = getRecordsTarget.path("efetch.fcgi"); uriBuilder2.addParameter("WebEnv", webEnv);
uriBuilder2.addParameter("query_key", queryKey);
invocationBuilder = getRecordsTarget.request(MediaType.TEXT_PLAIN_TYPE); Map<String, Map<String, String>> params2 = new HashMap<String, Map<String,String>>();
response = invocationBuilder.get(); String response2 = StringUtils.EMPTY;
countAttempt = 0;
while (StringUtils.isBlank(response2) && countAttempt <= attempt) {
countAttempt++;
response2 = liveImportClient.executeHttpGetRequest(1000, uriBuilder2.toString(), params2);
}
String xml = response.readEntity(String.class); if (StringUtils.isBlank(response2)) {
return parseXMLString(xml); throw new RuntimeException("After " + attempt
+ " attempts to contact the PubMed service, a correct answer could not be received."
+ " The request was made with this URL:" + uriBuilder2.toString());
}
return parseXMLString(response2);
} }
} }
@Override @Override
public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException { public List<ImportRecord> getRecords(InputStream inputStream) throws FileSourceException {
String xml = null;
try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) { try (Reader reader = new InputStreamReader(inputStream, "UTF-8")) {
xml = CharStreams.toString(reader); String xml = CharStreams.toString(reader);
return parseXMLString(xml); return parseXMLString(xml);
} catch (IOException e) { } catch (IOException e) {
throw new FileSourceException ("Cannot read XML from InputStream", e); throw new FileSourceException ("Cannot read XML from InputStream", e);
@@ -456,4 +484,21 @@ public class PubmedImportMetadataSourceServiceImpl extends AbstractImportMetadat
} }
return records; return records;
} }
}
public String getUrlFetch() {
return urlFetch;
}
public void setUrlFetch(String urlFetch) {
this.urlFetch = urlFetch;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping}
 * Responsible for defining the mapping of the PubmedEurope metadatum fields on the DSpace metadatum fields
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class PubmedEuropeFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * The map itself is injected by Spring from the bean named "pubmedEuropeMetadataFieldMap"
     * (see the {@code @Resource} annotation); this override only exists to attach that qualifier.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     * the item.
     */
    @Override
    @Resource(name = "pubmedEuropeMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,419 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.pubmedeurope;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
/**
 * Implements a data source for querying PubMed Europe (Europe PMC).
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
 */
public class PubmedEuropeMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
    implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    // Base URL of the PubMed Europe REST search endpoint (injected via Spring configuration).
    private String url;

    @Autowired
    private LiveImportClient liveImportClient;

    @Override
    public String getImportSource() {
        return "pubmedeu";
    }

    /**
     * Get a single record from the PubMed Europe.
     *
     * @param id Identifier for the record
     * @return The first matching record
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(id));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    /**
     * Find the number of records matching a query;
     *
     * @param query a query string to base the search on.
     * @return the sum of the matching records over this import source
     * @throws MetadataSourceException if the underlying methods throw any exception.
     */
    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    /**
     * Find the number of records matching a query;
     *
     * @param query A query string to base the search on.
     * @return The sum of the matching records over this import source
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        return retry(new CountByQueryCallable(query));
    }

    /**
     * Find records matching a string query.
     *
     * @param query A query string to base the search on.
     * @param start Offset to start at
     * @param count Number of records to retrieve.
     * @return A set of records. Fully transformed.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    /**
     * Find records based on a object query.
     *
     * @param query A query object to base the search on.
     * @return A set of records. Fully transformed.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    /**
     * Get a single record from the PubMed Europe.
     *
     * @param query A query matching a single record
     * @return The first matching record
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByIdCallable(query));
        return CollectionUtils.isEmpty(records) ? null : records.get(0);
    }

    /**
     * Finds records based on query object.
     *
     * @param query A query object to base the search on.
     * @return A collection of import records.
     * @throws MetadataSourceException If the underlying methods throw any exception.
     */
    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        return retry(new FindMatchingRecordCallable(query));
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for PubMed Europe");
    }

    @Override
    public void init() throws Exception {}

    /**
     * Looks up records by their PubMed Europe external identifier (EXT_ID).
     *
     * @param pubmedID the external identifier to search for
     * @param start    offset into the result list
     * @param size     maximum number of records (values below 1 are clamped to 1)
     */
    public List<ImportRecord> getByPubmedEuropeID(String pubmedID, Integer start, Integer size)
        throws IOException, HttpException {
        String query = "(EXT_ID:" + pubmedID + ")";
        return search(query, size < 1 ? 1 : size, start);
    }

    /**
     * This class is a Callable implementation to get PubMed Europe entries based on
     * query object.
     *
     * This Callable use as query value the string queryString passed to constructor.
     * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("count", maxResult);
            query.addParameter("start", start);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            Integer count = query.getParameterAsClass("count", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            String queryString = query.getParameterAsClass("query", String.class);
            return search(queryString, count, start);
        }
    }

    /**
     * This class is a Callable implementation to get an PubMed Europe entry using PubMed Europe ID
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByIdCallable implements Callable<List<ImportRecord>> {
        private Query query;

        private SearchByIdCallable(Query query) {
            this.query = query;
        }

        private SearchByIdCallable(String id) {
            this.query = new Query();
            query.addParameter("id", id);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            return getByPubmedEuropeID(query.getParameterAsClass("id", String.class), 1 , 0);
        }
    }

    /**
     * This class is a Callable implementation to search PubMed Europe entries
     * using author, title and year.
     * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    public class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
        private Query query;

        private FindMatchingRecordCallable(Query q) {
            query = q;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            String title = query.getParameterAsClass("title", String.class);
            String author = query.getParameterAsClass("author", String.class);
            Integer year = query.getParameterAsClass("year", Integer.class);
            Integer maxResult = query.getParameterAsClass("maxResult", Integer.class);
            Integer start = query.getParameterAsClass("start", Integer.class);
            return search(title, author, year, maxResult, start);
        }
    }

    /**
     * This class is a Callable implementation to count the number
     * of entries for an PubMed Europe query.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class CountByQueryCallable implements Callable<Integer> {
        private Query query;

        private CountByQueryCallable(String queryString) {
            query = new Query();
            query.addParameter("query", queryString);
        }

        private CountByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public Integer call() throws Exception {
            try {
                return count(query.getParameterAsClass("query", String.class));
            } catch (Exception e) {
                // Preserve the original cause instead of throwing a bare RuntimeException.
                throw new RuntimeException(e.getMessage(), e);
            }
        }
    }

    /**
     * Returns the total number of PubMed Europe publications returned by a specific query
     *
     * @param query A keyword or combination of keywords to be searched
     * @throws URISyntaxException If URI syntax error
     * @throws ClientProtocolException The client protocol exception
     * @throws IOException If IO error
     * @throws JaxenException If Xpath evaluation failed
     */
    public Integer count(String query) throws URISyntaxException, ClientProtocolException, IOException, JaxenException {
        try {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            String response = liveImportClient.executeHttpGetRequest(1000, buildURI(1, query), params);
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            // Europe PMC reports the total hit count in the <hitCount> element.
            Element element = root.getChild("hitCount");
            return Integer.parseInt(element.getValue());
        } catch (JDOMException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    /**
     * Searches PubMed Europe combining title, author and year clauses with AND.
     * Each clause is individually parenthesized and the whole expression is
     * wrapped once, so the resulting query always has balanced parentheses.
     *
     * @param title  publication title (ignored if blank)
     * @param author one or more author names, separated by comma/semicolon/whitespace (ignored if blank)
     * @param year   publication year, or -1 to skip the year clause
     * @param count  maximum number of results
     * @param start  offset into the result list
     */
    public List<ImportRecord> search(String title, String author, int year, int count, int start)
        throws IOException {
        List<String> clauses = new ArrayList<>();
        if (StringUtils.isNotBlank(title)) {
            clauses.add("(TITLE:" + title + ")");
        }
        if (StringUtils.isNotBlank(author)) {
            // Search for a surname and (optionally) initial(s) in publication author lists
            // AUTH:einstein, AUTH:”Smith AB”
            String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
            String[] authors = author.split(splitRegex);
            StringBuilder authorClause = new StringBuilder("(");
            for (int i = 0; i < authors.length; i++) {
                authorClause.append("AUTH:\"").append(authors[i]).append("\"");
                if (i < authors.length - 1) {
                    authorClause.append(" AND ");
                }
            }
            authorClause.append(")");
            clauses.add(authorClause.toString());
        }
        if (year != -1) {
            clauses.add("(PUB_YEAR:" + year + ")");
        }
        String query = "(" + String.join(" AND ", clauses) + ")";
        return search(query, count, start);
    }

    /**
     * Returns a list of PubMed Europe publication records
     *
     * @param query A keyword or combination of keywords to be searched
     * @param size The number of search results per page
     * @param start Start number for the acquired search result list
     * @throws IOException If IO error
     */
    public List<ImportRecord> search(String query, Integer size, Integer start) throws IOException {
        List<ImportRecord> results = new ArrayList<>();
        try {
            URIBuilder uriBuilder = new URIBuilder(this.url);
            uriBuilder.addParameter("format", "xml");
            uriBuilder.addParameter("resulttype", "core");
            uriBuilder.addParameter("pageSize", String.valueOf(size));
            uriBuilder.addParameter("query", query);
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            boolean lastPage = false;
            int skipped = 0;
            // Loop pages until we reach the last page OR we have collected enough results.
            // (Using && here: with || a blank response would spin on the same page forever.)
            while (!lastPage && results.size() < size) {
                String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
                String cursorMark = StringUtils.EMPTY;
                if (StringUtils.isNotBlank(response)) {
                    SAXBuilder saxBuilder = new SAXBuilder();
                    Document document = saxBuilder.build(new StringReader(response));
                    XPathFactory xpfac = XPathFactory.instance();
                    XPathExpression<Element> xPath = xpfac.compile("//responseWrapper/resultList/result",
                                                                   Filters.element());
                    List<Element> records = xPath.evaluate(document);
                    if (records.size() > 0) {
                        for (Element item : records) {
                            // Skip records until the requested start offset is reached.
                            if (start > skipped) {
                                skipped++;
                            } else {
                                results.add(transformSourceRecords(item));
                            }
                        }
                    } else {
                        lastPage = true;
                        break;
                    }
                    Element root = document.getRootElement();
                    Element nextCursorMark = root.getChild("nextCursorMark");
                    cursorMark = Objects.nonNull(nextCursorMark) ? nextCursorMark.getValue() : StringUtils.EMPTY;
                }
                if (StringUtils.isNotBlank(cursorMark)) {
                    // Europe PMC deep-paging: the parameter is named "cursorMark"
                    // (the previous "cursorMar" spelling was ignored by the service).
                    uriBuilder.setParameter("cursorMark", cursorMark);
                } else {
                    lastPage = true;
                }
            }
        } catch (URISyntaxException | JDOMException e) {
            log.error(e.getMessage(), e);
            throw new RuntimeException(e.getMessage(), e);
        }
        return results;
    }

    /**
     * Builds the request URI for a query with the given page size.
     */
    private String buildURI(Integer pageSize, String query) throws URISyntaxException {
        URIBuilder uriBuilder = new URIBuilder(this.url);
        uriBuilder.addParameter("format", "xml");
        uriBuilder.addParameter("resulttype", "core");
        uriBuilder.addParameter("pageSize", String.valueOf(pageSize));
        uriBuilder.addParameter("query", query);
        return uriBuilder.toString();
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }
}

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping}
 * Responsible for defining the mapping of the Scopus metadatum fields on the DSpace metadatum fields
 *
 * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
 */
public class ScopusFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * The map itself is injected by Spring from the bean named "scopusMetadataFieldMap"
     * (see the {@code @Resource} annotation); this override only exists to attach that qualifier.
     *
     * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to
     * the item.
     */
    @Override
    @Resource(name = "scopusMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,421 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scopus.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.URI_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Scopus
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com)
*/
public class ScopusImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private int timeout = 1000;
int itemPerPage = 25;
private String url;
private String apiKey;
private String instKey;
private String viewMode;
@Autowired
private LiveImportClient liveImportClient;
    /** Returns the HTTP client used to call the Scopus API. */
    public LiveImportClient getLiveImportClient() {
        return liveImportClient;
    }

    /** Sets the HTTP client used to call the Scopus API (normally autowired by Spring). */
    public void setLiveImportClient(LiveImportClient liveImportClient) {
        this.liveImportClient = liveImportClient;
    }
    // No initialization needed: all configuration is injected via Spring setters.
    @Override
    public void init() throws Exception {}
    /**
     * The string that identifies this import implementation. Preferably a URI.
     *
     * @return the identifying string "scopus"
     */
    @Override
    public String getImportSource() {
        return "scopus";
    }
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query)).size();
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString())).size();
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchNBByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start,
int count) throws MetadataSourceException {
if (isEID(query)) {
return retry(new FindByIdCallable(query));
}
if (DoiCheck.isDoi(query)) {
query = DoiCheck.purgeDoiValue(query);
}
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query)
throws MetadataSourceException {
if (isEID(query.toString())) {
return retry(new FindByIdCallable(query.toString()));
}
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = null;
if (DoiCheck.isDoi(query.toString())) {
query.addParameter("query", DoiCheck.purgeDoiValue(query.toString()));
}
if (isEID(query.toString())) {
records = retry(new FindByIdCallable(query.toString()));
} else {
records = retry(new SearchByQueryCallable(query));
}
return records == null || records.isEmpty() ? null : records.get(0);
}
    /**
     * Not supported for Scopus: matching by Item is not implemented.
     *
     * @throws MethodNotFoundException always
     */
    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item)
        throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for Scopus");
    }
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return records == null || records.isEmpty() ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
    throws MetadataSourceException {
    String rawQuery = query.toString();
    // EID identifiers are looked up directly instead of matched.
    if (isEID(rawQuery)) {
        return retry(new FindByIdCallable(rawQuery));
    }
    if (DoiCheck.isDoi(rawQuery)) {
        // Normalise DOI input before delegating to the fielded-query callable.
        query.addParameter("query", DoiCheck.purgeDoiValue(rawQuery));
    }
    return retry(new FindByQueryCallable(query));
}
/**
 * Returns whether the given query string is a Scopus EID
 * (e.g. {@code 2-s2.0-85012345678}).
 *
 * @param query the raw query string
 * @return {@code true} when the whole string matches the EID pattern
 */
private boolean isEID(String query) {
    // Return the match result directly instead of the redundant
    // if-matches-return-true / return-false construct.
    return Pattern.compile("2-s2\\.0-\\d+").matcher(query).matches();
}
/**
 * Callable that executes a Scopus search only to read the
 * {@code opensearch:totalResults} element, i.e. the number of hits.
 */
private class SearchNBByQueryCallable implements Callable<Integer> {

    // The raw query string sent to the Scopus search endpoint.
    private String query;

    private SearchNBByQueryCallable(String queryString) {
        this.query = queryString;
    }

    private SearchNBByQueryCallable(Query query) {
        this.query = query.getParameterAsClass("query", String.class);
    }

    /**
     * @return the total number of matching records, or {@code null} when
     *         no API key is configured or the count cannot be determined
     */
    @Override
    public Integer call() throws Exception {
        if (StringUtils.isNotBlank(apiKey)) {
            // Execute the request.
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            Map<String, String> requestParams = getRequestParameters(query, null, null, null);
            params.put(URI_PARAMETERS, requestParams);
            String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(response));
            Element root = document.getRootElement();
            List<Namespace> namespaces = Arrays.asList(
                Namespace.getNamespace("opensearch", "http://a9.com/-/spec/opensearch/1.1/"));
            XPathExpression<Element> xpath = XPathFactory.instance()
                .compile("opensearch:totalResults", Filters.element(), null, namespaces);
            Element count = xpath.evaluateFirst(root);
            // evaluateFirst returns null when the response carries no
            // opensearch:totalResults element; the original code would
            // then throw a NullPointerException on count.getText().
            if (count == null) {
                return null;
            }
            try {
                return Integer.parseInt(count.getText());
            } catch (NumberFormatException e) {
                return null;
            }
        }
        return null;
    }
}
/**
 * Callable resolving a single Scopus entry through the {@code EID(...)}
 * query syntax.
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
 */
private class FindByIdCallable implements Callable<List<ImportRecord>> {

    private String eid;

    private FindByIdCallable(String eid) {
        this.eid = eid;
    }

    @Override
    public List<ImportRecord> call() throws Exception {
        List<ImportRecord> records = new ArrayList<>();
        // Callers encode "/" inside identifiers as "!"; restore it here.
        String queryString = "EID(" + eid.replace("!", "/") + ")";
        if (StringUtils.isBlank(apiKey)) {
            // Without an API key no request is attempted.
            return records;
        }
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        params.put(URI_PARAMETERS, getRequestParameters(queryString, viewMode, null, null));
        String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
        for (Element entry : splitToRecords(response)) {
            records.add(transformSourceRecords(entry));
        }
        return records;
    }
}
/**
 * Callable that builds a fielded Scopus query from title / author / year
 * parameters and returns the matching records.
 */
private class FindByQueryCallable implements Callable<List<ImportRecord>> {

    private String title;
    private String author;
    private Integer year;
    // Paging: offset of the first record (default 0).
    private Integer start;
    // Paging: maximum number of records to return (default 20).
    private Integer count;

    private FindByQueryCallable(Query query) {
        this.title = query.getParameterAsClass("title", String.class);
        this.year = query.getParameterAsClass("year", Integer.class);
        this.author = query.getParameterAsClass("author", String.class);
        this.start = query.getParameterAsClass("start", Integer.class) != null ?
            query.getParameterAsClass("start", Integer.class) : 0;
        this.count = query.getParameterAsClass("count", Integer.class) != null ?
            query.getParameterAsClass("count", Integer.class) : 20;
    }

    @Override
    public List<ImportRecord> call() throws Exception {
        List<ImportRecord> results = new ArrayList<>();
        // StringBuilder replaces the original StringBuffer: no cross-thread
        // sharing happens here, so synchronisation is unnecessary.
        StringBuilder query = new StringBuilder();
        if (StringUtils.isNotBlank(title)) {
            // Fix: the original appended "" instead of ")", leaving the
            // title(...) clause unbalanced in the generated query string.
            query.append("title(").append(title).append(")");
        }
        if (StringUtils.isNotBlank(author)) {
            // [FAU]
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("AUTH(").append(author).append(")");
        }
        // Fix: "year" may be absent from the Query; unboxing a null Integer
        // in (year != -1) would have thrown a NullPointerException.
        if (year != null && year != -1) {
            // [DP]
            if (query.length() > 0) {
                query.append(" AND ");
            }
            query.append("PUBYEAR IS ").append(year);
        }
        String queryString = query.toString();
        // Blank-key check made consistent with the other callables.
        if (StringUtils.isNotBlank(apiKey)) {
            Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
            Map<String, String> requestParams = getRequestParameters(queryString, viewMode, start, count);
            params.put(URI_PARAMETERS, requestParams);
            String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
            for (Element record : splitToRecords(response)) {
                results.add(transformSourceRecords(record));
            }
        }
        return results;
    }
}
/**
 * Find records matching a string query.
 *
 * @param query A query string to base the search on.
 * @param start Offset to start at
 * @param count Number of records to retrieve.
 * @return A set of records. Fully transformed.
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
 */
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

    private Query query;

    private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
        query = new Query();
        query.addParameter("query", queryString);
        query.addParameter("start", start);
        query.addParameter("count", maxResult);
    }

    private SearchByQueryCallable(Query query) {
        this.query = query;
    }

    @Override
    public List<ImportRecord> call() throws Exception {
        List<ImportRecord> records = new ArrayList<>();
        if (StringUtils.isBlank(apiKey)) {
            // No API key configured: nothing to do.
            return records;
        }
        String queryString = query.getParameterAsClass("query", String.class);
        Integer start = query.getParameterAsClass("start", Integer.class);
        Integer count = query.getParameterAsClass("count", Integer.class);
        Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
        params.put(URI_PARAMETERS, getRequestParameters(queryString, viewMode, start, count));
        String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
        for (Element entry : splitToRecords(response)) {
            records.add(transformSourceRecords(entry));
        }
        return records;
    }
}
/**
 * Assembles the URI parameters for a Scopus request.
 *
 * @param query    the query string to send
 * @param viewMode optional "view" parameter; omitted when blank
 * @param start    paging offset; defaults to "0" when null
 * @param count    page size; defaults to "20" when null
 * @return a map of request parameter name/value pairs
 */
private Map<String, String> getRequestParameters(String query, String viewMode, Integer start, Integer count) {
    Map<String, String> params = new HashMap<String, String>();
    params.put("httpAccept", "application/xml");
    params.put("apiKey", apiKey);
    params.put("query", query);
    if (StringUtils.isNotBlank(instKey)) {
        // Institutional token, only sent when configured.
        params.put("insttoken", instKey);
    }
    if (StringUtils.isNotBlank(viewMode)) {
        params.put("view", viewMode);
    }
    params.put("start", start != null ? String.valueOf(start) : "0");
    params.put("count", count != null ? String.valueOf(count) : "20");
    return params;
}
/**
 * Parses the Atom feed returned by Scopus and extracts every
 * {@code <entry>} element.
 *
 * @param recordsSrc the raw XML response body
 * @return the entry elements, or an empty list when parsing fails
 */
private List<Element> splitToRecords(String recordsSrc) {
    try {
        Document feed = new SAXBuilder().build(new StringReader(recordsSrc));
        Namespace atom = Namespace.getNamespace("http://www.w3.org/2005/Atom");
        return feed.getRootElement().getChildren("entry", atom);
    } catch (JDOMException | IOException e) {
        // Parse errors are deliberately swallowed: callers treat a broken
        // response the same as an empty result set.
        return new ArrayList<Element>();
    }
}
/** @return the base URL of the Scopus endpoint */
public String getUrl() {
    return url;
}

/** @param url the base URL of the Scopus endpoint */
public void setUrl(String url) {
    this.url = url;
}

/** @return the Scopus "view" request parameter value */
public String getViewMode() {
    return viewMode;
}

/** @param viewMode the Scopus "view" request parameter value */
public void setViewMode(String viewMode) {
    this.viewMode = viewMode;
}

/** @return the configured Scopus API key */
public String getApiKey() {
    return apiKey;
}

/** @return the configured institutional token */
public String getInstKey() {
    return instKey;
}

/** @param apiKey the Scopus API key; requests are skipped when blank */
public void setApiKey(String apiKey) {
    this.apiKey = apiKey;
}

/** @param instKey the institutional token, sent as "insttoken" when set */
public void setInstKey(String instKey) {
    this.instKey = instKey;
}
}

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping}
 * Responsible for defining the mapping of the Web of Science metadatum fields on the DSpace metadatum fields
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
 */
@SuppressWarnings("rawtypes")
public class WOSFieldMapping extends AbstractMetadataFieldMapping {

    /**
     * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * what metadatafield is generated.
     *
     * The map itself is injected by Spring from the "wosMetadataFieldMap" bean.
     *
     * @param metadataFieldMap The map containing the link between retrieve
     *                         metadata and metadata that will be set to the item.
     */
    @Override
    @SuppressWarnings("unchecked")
    @Resource(name = "wosMetadataFieldMap")
    public void setMetadataFieldMap(Map metadataFieldMap) {
        super.setMetadataFieldMap(metadataFieldMap);
    }
}

View File

@@ -0,0 +1,329 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.wos.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Implements a data source for querying Web of Science.
 *
 * All network calls go through {@link LiveImportClient}; every callable is a
 * no-op (returning null or an empty list) when no API key is configured.
 *
 * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
 */
public class WOSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
    implements QuerySource {

    private final static Logger log = LogManager.getLogger();

    // Query prefix marking an Author Identifier search, e.g. "AI=(...)".
    private static final String AI_PATTERN = "^AI=(.*)";
    // A bare 15-digit value is treated as a WOS accession number (see isIsi).
    private static final Pattern ISI_PATTERN = Pattern.compile("^\\d{15}$");

    // HTTP timeout in milliseconds passed to the live import client.
    private int timeout = 1000;

    // Base URL for id-based lookups; configured via setter injection.
    private String url;
    // Base URL for query searches; configured via setter injection.
    private String urlSearch;
    // WOS API key, sent as the X-ApiKey header.
    private String apiKey;

    @Autowired
    private LiveImportClient liveImportClient;

    // No initialisation required for this data source.
    @Override
    public void init() throws Exception {}

    /**
     * The string that identifies this import implementation. Preferable a URI
     *
     * @return the identifying uri
     */
    @Override
    public String getImportSource() {
        return "wos";
    }

    @Override
    public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query, count, start));
    }

    @Override
    public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
        return retry(new SearchByQueryCallable(query));
    }

    /**
     * Runs the search and returns only the first matching record,
     * or null when nothing was found.
     */
    @Override
    public ImportRecord getRecord(Query query) throws MetadataSourceException {
        List<ImportRecord> records = retry(new SearchByQueryCallable(query));
        return records == null || records.isEmpty() ? null : records.get(0);
    }

    /**
     * Resolves a single record by identifier (DOI or WOS id),
     * or null when nothing was found.
     */
    @Override
    public ImportRecord getRecord(String id) throws MetadataSourceException {
        List<ImportRecord> records = retry(new FindByIdCallable(id));
        return records == null || records.isEmpty() ? null : records.get(0);
    }

    @Override
    public int getRecordsCount(String query) throws MetadataSourceException {
        return retry(new SearchNBByQueryCallable(query));
    }

    // The following three operations are intentionally unsupported for WOS.
    @Override
    public int getRecordsCount(Query query) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    @Override
    public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for WOS");
    }

    /**
     * This class implements a callable to get the numbers of result
     */
    private class SearchNBByQueryCallable implements Callable<Integer> {

        private String query;

        private SearchNBByQueryCallable(String queryString) {
            this.query = queryString;
        }

        private SearchNBByQueryCallable(Query query) {
            this.query = query.getParameterAsClass("query", String.class);
        }

        // Returns the value of the RecordsFound element, or null when no
        // API key is configured.
        @Override
        public Integer call() throws Exception {
            if (StringUtils.isNotBlank(apiKey)) {
                String queryString = URLEncoder.encode(checkQuery(query), StandardCharsets.UTF_8);
                String url = urlSearch + queryString + "&count=1&firstRecord=1";
                Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
                params.put(HEADER_PARAMETERS, getRequestParameters());
                String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
                SAXBuilder saxBuilder = new SAXBuilder();
                Document document = saxBuilder.build(new StringReader(response));
                Element root = document.getRootElement();
                XPathExpression<Element> xpath = XPathFactory.instance().compile("//*[@name=\"RecordsFound\"]",
                    Filters.element(), null);
                // NOTE(review): evaluateFirst can return null when the
                // response has no RecordsFound element — confirm upstream
                // always includes it, otherwise this NPEs on tot.getValue().
                Element tot = xpath.evaluateFirst(root);
                return Integer.valueOf(tot.getValue());
            }
            return null;
        }
    }

    /**
     * This class is a Callable implementation to get a Web of Science entry using Doi
     *
     * Note: getRecord(String) also routes non-DOI identifiers through this
     * callable; checkQuery is not applied here, the id is used verbatim.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class FindByIdCallable implements Callable<List<ImportRecord>> {

        private String doi;

        private FindByIdCallable(String doi) {
            // URL-encode eagerly: the value is concatenated into the URL.
            this.doi = URLEncoder.encode(doi, StandardCharsets.UTF_8);
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> results = new ArrayList<>();
            if (StringUtils.isNotBlank(apiKey)) {
                String urlString = url + this.doi + "?databaseId=WOS&lang=en&count=10&firstRecord=1";
                Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
                params.put(HEADER_PARAMETERS, getRequestParameters());
                String response = liveImportClient.executeHttpGetRequest(timeout, urlString, params);
                List<Element> elements = splitToRecords(response);
                for (Element record : elements) {
                    results.add(transformSourceRecords(record));
                }
            }
            return results;
        }
    }

    /**
     * Find records matching a string query.
     *
     * @param query A query string to base the search on.
     * @param start Offset to start at
     * @param count Number of records to retrieve.
     * @return A set of records. Fully transformed.
     *
     * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
     */
    private class SearchByQueryCallable implements Callable<List<ImportRecord>> {

        private Query query;

        private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
            query = new Query();
            query.addParameter("query", queryString);
            query.addParameter("start", start);
            query.addParameter("count", maxResult);
        }

        private SearchByQueryCallable(Query query) {
            this.query = query;
        }

        @Override
        public List<ImportRecord> call() throws Exception {
            List<ImportRecord> results = new ArrayList<>();
            String queryString = checkQuery(query.getParameterAsClass("query", String.class));
            Integer start = query.getParameterAsClass("start", Integer.class);
            Integer count = query.getParameterAsClass("count", Integer.class);
            if (StringUtils.isNotBlank(apiKey)) {
                Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
                params.put(HEADER_PARAMETERS, getRequestParameters());
                // NOTE(review): "start" is unboxed in (start + 1); a Query
                // built without a "start" parameter would throw a
                // NullPointerException here — confirm all callers set it.
                String url = urlSearch + URLEncoder.encode(queryString, StandardCharsets.UTF_8)
                    + "&count=" + count + "&firstRecord=" + (start + 1);
                String response = liveImportClient.executeHttpGetRequest(timeout, url, params);
                List<Element> omElements = splitToRecords(response);
                for (Element el : omElements) {
                    results.add(transformSourceRecords(el));
                }
            }
            return results;
        }
    }

    /**
     * Builds the HTTP headers common to every WOS request:
     * XML response format and the API key.
     */
    private Map<String, String> getRequestParameters() {
        Map<String, String> params = new HashMap<String, String>();
        params.put("Accept", "application/xml");
        params.put("X-ApiKey", this.apiKey);
        return params;
    }

    /**
     * This method check if the query contain
     * "AI=(...)" Author Identifier or a DOI "DO=(query)"
     * or Accession Number "UT=(query)".
     * Otherwise the value is placed in TS=(query) tag
     * that searches for topic terms in the following fields within a document:
     * Title, Abstract, Author keywords, Keywords Plus
     *
     * @param query the raw user query
     * @return the query wrapped in the appropriate WOS field tag
     */
    private String checkQuery(String query) {
        Pattern risPattern = Pattern.compile(AI_PATTERN);
        Matcher risMatcher = risPattern.matcher(query.trim());
        if (risMatcher.matches()) {
            // Already an explicit Author Identifier query: pass through.
            return query;
        }
        if (DoiCheck.isDoi(query)) {
            // FIXME: workaround to be removed once fixed by the community the double post of query param
            if (query.startsWith(",")) {
                query = query.substring(1);
            }
            return "DO=(" + query + ")";
        } else if (isIsi(query)) {
            return "UT=(" + query + ")";
        }
        // Default: topic-terms search.
        StringBuilder queryBuilder = new StringBuilder("TS=(");
        queryBuilder.append(query).append(")");
        return queryBuilder.toString();
    }

    /**
     * Returns true when the query looks like a WOS accession number:
     * either the explicit "WOS:" prefix or a bare 15-digit value.
     */
    private boolean isIsi(String query) {
        if (query.startsWith("WOS:")) {
            return true;
        }
        Matcher matcher = ISI_PATTERN.matcher(query.trim());
        return matcher.matches();
    }

    /**
     * Extracts the individual record elements from a WOS response.
     * The "Records" element holds the payload as escaped XML text,
     * which is parsed a second time before taking its child elements.
     * Returns an empty list on any parse failure.
     */
    private List<Element> splitToRecords(String recordsSrc) {
        try {
            SAXBuilder saxBuilder = new SAXBuilder();
            Document document = saxBuilder.build(new StringReader(recordsSrc));
            Element root = document.getRootElement();
            String cData = XPathFactory.instance().compile("//*[@name=\"Records\"]",
                Filters.element(), null).evaluate(root).get(0).getValue().trim();
            Document intDocument = saxBuilder.build(new StringReader(cData));
            XPathExpression<Element> xPath = XPathFactory.instance().compile("*", Filters.element(), null);
            List<Element> records = xPath.evaluate(intDocument.getRootElement());
            if (CollectionUtils.isNotEmpty(records)) {
                return records;
            }
        } catch (JDOMException | IOException e) {
            log.error(e.getMessage());
            return new ArrayList<Element>();
        }
        return new ArrayList<Element>();
    }

    /** @return the base URL for id-based lookups */
    public String getUrl() {
        return url;
    }

    /** @param url the base URL for id-based lookups */
    public void setUrl(String url) {
        this.url = url;
    }

    /** @return the base URL for query searches */
    public String getUrlSearch() {
        return urlSearch;
    }

    /** @param urlSearch the base URL for query searches */
    public void setUrlSearch(String urlSearch) {
        this.urlSearch = urlSearch;
    }

    /** @return the configured WOS API key */
    public String getApiKey() {
        return apiKey;
    }

    /** @param apiKey the WOS API key; requests are skipped when blank */
    public void setApiKey(String apiKey) {
        this.apiKey = apiKey;
    }
}

View File

@@ -10,6 +10,7 @@ package org.dspace.scripts;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.EnumType; import javax.persistence.EnumType;
@@ -33,6 +34,7 @@ import org.dspace.content.Bitstream;
import org.dspace.content.ProcessStatus; import org.dspace.content.ProcessStatus;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/** /**
* This class is the DB Entity representation of the Process object to be stored in the Database * This class is the DB Entity representation of the Process object to be stored in the Database
@@ -77,6 +79,17 @@ public class Process implements ReloadableEntity<Integer> {
) )
private List<Bitstream> bitstreams; private List<Bitstream> bitstreams;
/*
* Special Groups associated with this Process
*/
@ManyToMany(fetch = FetchType.LAZY, cascade = {CascadeType.PERSIST})
@JoinTable(
name = "process2group",
joinColumns = {@JoinColumn(name = "process_id")},
inverseJoinColumns = {@JoinColumn(name = "group_id")}
)
private List<Group> groups;
@Column(name = "creation_time", nullable = false) @Column(name = "creation_time", nullable = false)
@Temporal(TemporalType.TIMESTAMP) @Temporal(TemporalType.TIMESTAMP)
private Date creationTime; private Date creationTime;
@@ -211,6 +224,21 @@ public class Process implements ReloadableEntity<Integer> {
return creationTime; return creationTime;
} }
/**
* This method sets the special groups associated with the Process.
*/
public List<Group> getGroups() {
return groups;
}
/**
* This method will return special groups associated with the Process.
* @return The special groups of this process.
*/
public void setGroups(List<Group> groups) {
this.groups = groups;
}
/** /**
* Return <code>true</code> if <code>other</code> is the same Process * Return <code>true</code> if <code>other</code> is the same Process
* as this object, <code>false</code> otherwise * as this object, <code>false</code> otherwise

View File

@@ -21,6 +21,7 @@ import java.util.Comparator;
import java.util.Date; import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -43,6 +44,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.scripts.service.ProcessService; import org.dspace.scripts.service.ProcessService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -74,13 +76,21 @@ public class ProcessServiceImpl implements ProcessService {
@Override @Override
public Process create(Context context, EPerson ePerson, String scriptName, public Process create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) throws SQLException { List<DSpaceCommandLineParameter> parameters,
final Set<Group> specialGroups) throws SQLException {
Process process = new Process(); Process process = new Process();
process.setEPerson(ePerson); process.setEPerson(ePerson);
process.setName(scriptName); process.setName(scriptName);
process.setParameters(DSpaceCommandLineParameter.concatenate(parameters)); process.setParameters(DSpaceCommandLineParameter.concatenate(parameters));
process.setCreationTime(new Date()); process.setCreationTime(new Date());
Optional.ofNullable(specialGroups)
.ifPresent(sg -> {
// we use a set to be sure no duplicated special groups are stored with process
Set<Group> specialGroupsSet = new HashSet<>(sg);
process.setGroups(new ArrayList<>(specialGroupsSet));
});
Process createdProcess = processDAO.create(context, process); Process createdProcess = processDAO.create(context, process);
log.info(LogHelper.getHeader(context, "process_create", log.info(LogHelper.getHeader(context, "process_create",
"Process has been created for eperson with email " + ePerson.getEmail() "Process has been created for eperson with email " + ePerson.getEmail()

View File

@@ -10,7 +10,9 @@ package org.dspace.scripts.handler;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -114,4 +116,12 @@ public interface DSpaceRunnableHandler {
*/ */
public void writeFilestream(Context context, String fileName, InputStream inputStream, String type) public void writeFilestream(Context context, String fileName, InputStream inputStream, String type)
throws IOException, SQLException, AuthorizeException; throws IOException, SQLException, AuthorizeException;
/**
* This method will return a List of UUIDs for the special groups
* associated with the processId contained by specific implementations of this interface.
* Otherwise, it returns an empty collection.
* @return List containing UUIDs of Special Groups of the associated Process.
*/
public List<UUID> getSpecialGroups();
} }

View File

@@ -10,7 +10,10 @@ package org.dspace.scripts.handler.impl;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.UUID;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
@@ -113,4 +116,9 @@ public class CommandLineDSpaceRunnableHandler implements DSpaceRunnableHandler {
File file = new File(fileName); File file = new File(fileName);
FileUtils.copyInputStreamToFile(inputStream, file); FileUtils.copyInputStreamToFile(inputStream, file);
} }
@Override
public List<UUID> getSpecialGroups() {
return Collections.emptyList();
}
} }

View File

@@ -11,11 +11,13 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.ProcessLogLevel; import org.dspace.scripts.ProcessLogLevel;
@@ -32,11 +34,14 @@ public interface ProcessService {
* @param ePerson The ePerson for which this process will be created on * @param ePerson The ePerson for which this process will be created on
* @param scriptName The script name to be used for the process * @param scriptName The script name to be used for the process
* @param parameters The parameters to be used for the process * @param parameters The parameters to be used for the process
* @param specialGroups Allows to set special groups, associated with application context when process is created,
* other than the ones derived from the eperson membership.
* @return The created process * @return The created process
* @throws SQLException If something goes wrong * @throws SQLException If something goes wrong
*/ */
public Process create(Context context, EPerson ePerson, String scriptName, public Process create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) throws SQLException; List<DSpaceCommandLineParameter> parameters,
final Set<Group> specialGroups) throws SQLException;
/** /**
* This method will retrieve a Process object from the Database with the given ID * This method will retrieve a Process object from the Database with the given ID

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------

-- Sequence supplying primary-key values for orcid_token
-- (consumed by the ORM; no DEFAULT is attached to the id column).
CREATE SEQUENCE orcid_token_id_seq;

CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
-- One token per eperson (UNIQUE); must reference an existing eperson.
eperson_id UUID NOT NULL UNIQUE,
-- Optional link to the researcher profile item.
profile_item_id UUID,
access_token VARCHAR(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------

-- Join table: one row per (process, special group) pair.
-- NOTE(review): group deletions cascade, but the process_id FK has no
-- ON DELETE action — confirm process deletion removes these rows first.
CREATE TABLE Process2Group
(
process_id INTEGER REFERENCES Process(process_id),
group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------

-- Sequence supplying primary-key values for orcid_token
-- (consumed by the ORM; no DEFAULT is attached to the id column).
CREATE SEQUENCE orcid_token_id_seq;

CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
-- One token per eperson (UNIQUE); RAW(16) stores the UUID bytes.
eperson_id RAW(16) NOT NULL UNIQUE,
-- Optional link to the researcher profile item.
profile_item_id RAW(16),
access_token VARCHAR2(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------

-- Join table: one row per (process, special group) pair.
-- NOTE(review): group deletions cascade, but the process_id FK has no
-- ON DELETE action — confirm process deletion removes these rows first.
CREATE TABLE Process2Group
(
process_id INTEGER REFERENCES Process(process_id),
group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -0,0 +1,24 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-----------------------------------------------------------------------------------
-- Create table for ORCID access tokens
-----------------------------------------------------------------------------------

-- Sequence supplying primary-key values for orcid_token
-- (consumed by the ORM; no DEFAULT is attached to the id column).
CREATE SEQUENCE orcid_token_id_seq;

CREATE TABLE orcid_token
(
id INTEGER NOT NULL,
-- One token per eperson (UNIQUE); must reference an existing eperson.
eperson_id uuid NOT NULL UNIQUE,
-- Optional link to the researcher profile item.
profile_item_id uuid,
access_token VARCHAR(100) NOT NULL,
CONSTRAINT orcid_token_pkey PRIMARY KEY (id),
CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid),
CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid)
);

View File

@@ -0,0 +1,18 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-------------------------------------------------------------------------------
-- Table to store Groups related to a Process on its creation
-------------------------------------------------------------------------------
-- Junction table: which EPerson groups were attached to a Process when it was created.
-- NOTE(review): identical text to the other Process2Group migration in this change set —
-- presumably the second DB-dialect copy (H2 vs Postgres); confirm from the file paths.
CREATE TABLE Process2Group
(
    process_id INTEGER REFERENCES Process(process_id),
    -- deleting a group silently drops its process associations
    group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE,
    CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id)
);

View File

@@ -43,11 +43,11 @@
class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping"> class="org.dspace.importer.external.arxiv.metadatamapping.ArXivFieldMapping">
</bean> </bean>
<bean id="pubmedImportService" <bean id="pubmedImportService"
class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl"> class="org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl">
<property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/> <property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/> <property name="urlFetch" value="${pubmed.url.fetch}"/>
<property name="urlSearch" value="${pubmed.url.search}"/>
<property name="generateQueryForItem" ref="pubmedService"></property> <property name="generateQueryForItem" ref="pubmedService"></property>
<property name="supportedExtensions"> <property name="supportedExtensions">
<list> <list>
@@ -56,7 +56,6 @@
</property> </property>
</bean> </bean>
<bean id="pubmedMetadataFieldMapping" <bean id="pubmedMetadataFieldMapping"
class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping"> class="org.dspace.importer.external.pubmed.metadatamapping.PubmedFieldMapping">
</bean> </bean>
@@ -120,6 +119,7 @@
<property name="url" value="${crossref.url}"/> <property name="url" value="${crossref.url}"/>
</bean> </bean>
<bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/> <bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/>
<bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton"> <bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/> <property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/>
<property name="consumerKey" value="${epo.consumerKey}" /> <property name="consumerKey" value="${epo.consumerKey}" />
@@ -133,6 +133,15 @@
</bean> </bean>
<bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/> <bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/>
<bean id="ScopusImportService" class="org.dspace.importer.external.scopus.service.ScopusImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="scopusMetadataFieldMapping"/>
<property name="url" value="${scopus.url}"/>
<property name="apiKey" value="${scopus.apiKey}"/>
<property name="instKey" value="${scopus.instToken}"/>
<property name="viewMode" value="${scopus.search-api.viewMode}"/>
</bean>
<bean id="scopusMetadataFieldMapping" class="org.dspace.importer.external.scopus.service.ScopusFieldMapping"/>
<bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton"> <bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton">
<!-- Set to empty to use the default set of fields --> <!-- Set to empty to use the default set of fields -->
<constructor-arg type="java.lang.String" value=""/> <constructor-arg type="java.lang.String" value=""/>
@@ -148,6 +157,27 @@
</bean> </bean>
<bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/> <bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/>
<bean id="WosImportService" class="org.dspace.importer.external.wos.service.WOSImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="wosMetadataFieldMapping"/>
<property name="apiKey" value="${wos.apiKey}"/>
<property name="url" value="${wos.url}"/>
<property name="urlSearch" value="${wos.url.search}"/>
</bean>
<bean id="wosMetadataFieldMapping" class="org.dspace.importer.external.wos.service.WOSFieldMapping"/>
<bean id="PubmedEuropeImportService" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="PubmedEuropeMetadataFieldMapping"/>
<property name="url" value="${pubmedeurope.url}"/>
</bean>
<bean id="PubmedEuropeMetadataFieldMapping" class="org.dspace.importer.external.pubmedeurope.PubmedEuropeFieldMapping"/>
<bean id="CiniiImportService" class="org.dspace.importer.external.cinii.CiniiImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="CiniiMetadataFieldMapping"/>
<property name="url" value="${cinii.url}"/>
<property name="urlSearch" value="${cinii.url.search}"/>
</bean>
<bean id="CiniiMetadataFieldMapping" class="org.dspace.importer.external.cinii.CiniiFieldMapping"/>
<bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton"> <bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton">
<property name="apiKey" value="${ads.key}" /> <property name="apiKey" value="${ads.key}" />
<property name="url" value="${ads.url}" /> <property name="url" value="${ads.url}" />
@@ -161,4 +191,4 @@
<constructor-arg value="dc.identifier.other"/> <constructor-arg value="dc.identifier.other"/>
</bean> </bean>
</beans> </beans>

View File

@@ -145,6 +145,8 @@ useProxies = true
proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.ipranges = 7.7.7.7
proxies.trusted.include_ui_ip = true proxies.trusted.include_ui_ip = true
csvexport.dir = dspace-server-webapp/src/test/data/dspaceFolder/exports
# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN # For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN
management.health.solrOai.enabled = false management.health.solrOai.enabled = false
@@ -154,4 +156,4 @@ researcher-profile.entity-type = Person
# These settings ensure "dspace.object.owner" field are indexed by Authority Control # These settings ensure "dspace.object.owner" field are indexed by Authority Control
choices.plugin.dspace.object.owner = EPersonAuthority choices.plugin.dspace.object.owner = EPersonAuthority
choices.presentation.dspace.object.owner = suggest choices.presentation.dspace.object.owner = suggest
authority.controlled.dspace.object.owner = true authority.controlled.dspace.object.owner = true

View File

@@ -42,10 +42,10 @@
</bean> </bean>
<bean class="org.dspace.external.provider.impl.OrcidV3AuthorDataProvider" init-method="init"> <bean class="org.dspace.external.provider.impl.OrcidV3AuthorDataProvider" init-method="init">
<property name="sourceIdentifier" value="orcid"/> <property name="sourceIdentifier" value="orcid"/>
<property name="orcidUrl" value="${orcid.url}" /> <property name="orcidUrl" value="${orcid.domain-url}" />
<property name="clientId" value="${orcid.clientid}" /> <property name="clientId" value="${orcid.application-client-id}" />
<property name="clientSecret" value="${orcid.clientsecret}" /> <property name="clientSecret" value="${orcid.application-client-secret}" />
<property name="OAUTHUrl" value="${orcid.oauth.url}" /> <property name="OAUTHUrl" value="${orcid.token-url}" />
<property name="orcidRestConnector" ref="orcidRestConnector"/> <property name="orcidRestConnector" ref="orcidRestConnector"/>
<property name="supportedEntityTypes"> <property name="supportedEntityTypes">
<list> <list>
@@ -55,26 +55,30 @@
</bean> </bean>
<bean id="orcidRestConnector" class="org.dspace.external.OrcidRestConnector"> <bean id="orcidRestConnector" class="org.dspace.external.OrcidRestConnector">
<constructor-arg value="${orcid.api.url}"/> <constructor-arg value="${orcid.api-url}"/>
</bean> </bean>
<bean id="pubmedLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider"> <bean id="pubmedLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
<property name="metadataSource" ref="mockPubmedImportService"/> <property name="metadataSource" ref="pubmedImportService"/>
<property name="sourceIdentifier" value="pubmed"/> <property name="sourceIdentifier" value="pubmed"/>
<property name="recordIdMetadata" value="dc.identifier.other"/> <property name="recordIdMetadata" value="dc.identifier.other"/>
</bean> <property name="supportedEntityTypes">
<bean id="mockPubmedImportService"
class="org.dspace.external.provider.impl.MockPubmedImportMetadataSourceServiceImpl">
<property name="metadataFieldMapping" ref="pubmedMetadataFieldMapping"/>
<property name="baseAddress" value="https://eutils.ncbi.nlm.nih.gov/entrez/eutils/"/>
<property name="supportedExtensions">
<list> <list>
<value>xml</value> <value>Publication</value>
<value>none</value>
</list> </list>
</property> </property>
</bean> </bean>
</beans> <bean id="scopusLiveImportDataProvider" class="org.dspace.external.provider.impl.LiveImportDataProvider">
<property name="metadataSource" ref="ScopusImportService"/>
<property name="sourceIdentifier" value="scopus"/>
<property name="recordIdMetadata" value="dc.identifier.scopus"/>
<property name="supportedEntityTypes">
<list>
<value>Publication</value>
</list>
</property>
</bean>
</beans>

View File

@@ -22,6 +22,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/> <property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportCli"/>
</bean> </bean>
<bean id="metadata-export-search" class="org.dspace.app.bulkedit.MetadataExportSearchCliScriptConfiguration">
<property name="description" value="export metadata from a discovery search" />
<property name="dspaceRunnableClass" value="org.dspace.app.bulkedit.MetadataExportSearchCli" />
</bean>
<bean id="curate" <bean id="curate"
class="org.dspace.curate.CurationCliScriptConfiguration"> class="org.dspace.curate.CurationCliScriptConfiguration">
<property name="description" <property name="description"
@@ -45,6 +50,11 @@
<property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/> <property name="dspaceRunnableClass" value="org.dspace.app.mediafilter.MediaFilterScript"/>
</bean> </bean>
<bean id="solr-database-resync" class="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCliScriptConfiguration">
<property name="description" value="Update the database status of Items in solr"/>
<property name="dspaceRunnableClass" value="org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli"/>
</bean>
<bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype"> <bean id="another-mock-script" class="org.dspace.scripts.MockDSpaceRunnableScriptConfiguration" scope="prototype">
<property name="description" value="Mocking a script for testing purposes" /> <property name="description" value="Mocking a script for testing purposes" />
<property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/> <property name="dspaceRunnableClass" value="org.dspace.scripts.impl.MockDSpaceRunnableScript"/>

View File

@@ -0,0 +1,253 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.bulkedit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.google.common.io.Files;
import com.opencsv.CSVReader;
import com.opencsv.exceptions.CsvException;
import org.apache.log4j.Logger;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchUtils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Integration tests for the {@code metadata-export-search} launcher script.
 * <p>
 * Each test runs the script with a discovery query, scope and/or facet filters,
 * then parses the CSV it produced and asserts that exactly the expected item
 * ids are present (one row per item plus one header row).
 */
@Ignore // NOTE(review): whole suite is disabled — confirm whether this is still intended
public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase {

    private String subject1 = "subject1";
    private String subject2 = "subject2";
    private int numberItemsSubject1 = 30;
    private int numberItemsSubject2 = 2;
    private Item[] itemsSubject1 = new Item[numberItemsSubject1];
    private Item[] itemsSubject2 = new Item[numberItemsSubject2];
    // Absolute path of the CSV file the script writes to (resolved in setUp)
    private String filename;
    private Collection collection;
    private Logger logger = Logger.getLogger(MetadataExportSearchIT.class);
    private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
    private SearchService searchService = SearchUtils.getSearchService();

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // dummy search so that the SearchService gets called in a test context first
        DiscoverQuery query = new DiscoverQuery();
        query.setMaxResults(0);
        searchService.search(context, query);

        context.turnOffAuthorisationSystem();
        Community community = CommunityBuilder.createCommunity(context).build();
        collection = CollectionBuilder.createCollection(context, community).build();
        filename = configurationService.getProperty("dspace.dir")
            + testProps.get("test.exportcsv").toString();

        // NOTE(review): "2020-09-" + i yields day "0" when i == 0 (and day 30 only exists
        // in September by luck). The range filter below only keys on the year, so the
        // tests pass, but consider String.format("2020-09-%02d", i + 1) — verify first.
        for (int i = 0; i < numberItemsSubject1; i++) {
            itemsSubject1[i] = ItemBuilder.createItem(context, collection)
                .withTitle(String.format("%s item %d", subject1, i))
                .withSubject(subject1)
                .withIssueDate("2020-09-" + i)
                .build();
        }

        for (int i = 0; i < numberItemsSubject2; i++) {
            itemsSubject2[i] = ItemBuilder.createItem(context, collection)
                .withTitle(String.format("%s item %d", subject2, i))
                .withSubject(subject2)
                .withIssueDate("2021-09-" + i)
                .build();
        }
        context.restoreAuthSystemState();
    }

    /**
     * Asserts that the CSV at {@code filename} contains exactly one data row per
     * item in {@code items} (plus the header row) and that the first column of
     * every data row is one of the expected item ids.
     *
     * @param filename path of the CSV file written by the script
     * @param items    the items expected in the export
     * @throws IOException  if the file cannot be read
     * @throws CsvException if the file is not valid CSV
     */
    private void checkItemsPresentInFile(String filename, Item[] items) throws IOException, CsvException {
        File file = new File(filename);
        List<String[]> lines;
        // try-with-resources: the original leaked the Reader and CSVReader,
        // most visibly when an assertion below failed before any close could run
        try (Reader reader = Files.newReader(file, Charset.defaultCharset());
             CSVReader csvReader = new CSVReader(reader)) {
            lines = csvReader.readAll();
        }
        //length + 1 is because of 1 row extra for the headers
        assertEquals(items.length + 1, lines.size());

        List<String> ids = new ArrayList<>();
        //ignoring the first row as this only contains headers;
        logger.debug("checking content of lines");
        for (int i = 1; i < lines.size(); i++) {
            logger.debug(String.join(", ", lines.get(i)));
            ids.add(lines.get(i)[0]);
        }
        for (Item item : items) {
            assertTrue(ids.contains(item.getID().toString()));
        }
    }

    // NOTE(review): "metadate" is a typo for "metadata"; kept as-is so external test
    // selection by name keeps working — rename in a dedicated commit if desired.
    @Test
    public void metadateExportSearchQueryTest() throws Exception {
        int result = runDSpaceScript("metadata-export-search", "-q", "subject:" + subject1, "-n", filename);
        assertEquals(0, result);
        checkItemsPresentInFile(filename, itemsSubject1);

        result = runDSpaceScript("metadata-export-search", "-q", "subject: " + subject2, "-n", filename);
        assertEquals(0, result);
        checkItemsPresentInFile(filename, itemsSubject2);
    }

    /** Scoping the export to one collection must exclude matches from other collections. */
    @Test
    public void exportMetadataSearchSpecificContainerTest() throws Exception {
        context.turnOffAuthorisationSystem();
        Community community2 = CommunityBuilder.createCommunity(context).build();
        Collection collection2 = CollectionBuilder.createCollection(context, community2).build();

        int numberItemsDifferentCollection = 15;
        Item[] itemsDifferentCollection = new Item[numberItemsDifferentCollection];
        for (int i = 0; i < numberItemsDifferentCollection; i++) {
            itemsDifferentCollection[i] = ItemBuilder.createItem(context, collection2)
                .withTitle("item different collection " + i)
                .withSubject(subject1)
                .build();
        }

        //creating some items with a different subject to make sure the query still works
        for (int i = 0; i < 5; i++) {
            ItemBuilder.createItem(context, collection2)
                .withTitle("item different collection, different subject " + i)
                .withSubject(subject2)
                .build();
        }
        context.restoreAuthSystemState();

        int result = runDSpaceScript(
            "metadata-export-search", "-q", "subject: " + subject1, "-s", collection2.getID().toString(), "-n", filename
        );
        assertEquals(0, result);
        checkItemsPresentInFile(filename, itemsDifferentCollection);
    }

    /** A single "equals" facet filter must select exactly the matching items. */
    @Test
    public void exportMetadataSearchFilter() throws Exception {
        int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=" + subject1, "-n", filename);
        assertEquals(0, result);
        checkItemsPresentInFile(filename, itemsSubject1);
    }

    /** A date-range facet filter must select the items issued inside the range. */
    @Test
    public void exportMetadataSearchFilterDate() throws Exception {
        int result = runDSpaceScript(
            "metadata-export-search", "-f", "dateIssued,equals=[2000 TO 2020]", "-n", filename
        );
        assertEquals(0, result);
        checkItemsPresentInFile(filename, itemsSubject1);
    }

    /** Multiple filters are combined with AND: only the one item matching both remains. */
    @Test
    public void exportMetadataSearchMultipleFilters() throws Exception {
        int result = runDSpaceScript(
            "metadata-export-search", "-f", "subject,equals=" + subject1, "-f",
            "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename
        );
        assertEquals(0, result);
        Item[] expectedResult = Arrays.copyOfRange(itemsSubject1, 0, 1);
        checkItemsPresentInFile(filename, expectedResult);
    }

    /** "equals" must match the whole value, not a substring ("well-being" vs "financial well-being"). */
    @Test
    public void exportMetadataSearchEqualsFilterTest()
        throws Exception {
        context.turnOffAuthorisationSystem();
        Item wellBeingItem = ItemBuilder.createItem(context, collection)
            .withTitle("test item well-being")
            .withSubject("well-being")
            .build();

        ItemBuilder.createItem(context, collection)
            .withTitle("test item financial well-being")
            .withSubject("financial well-being")
            .build();
        context.restoreAuthSystemState();

        int result = runDSpaceScript("metadata-export-search", "-f", "subject,equals=well-being", "-n", filename);
        assertEquals(0, result);
        Item[] expectedResult = new Item[] {wellBeingItem};
        checkItemsPresentInFile(filename, expectedResult);
    }

    /** A query with no hits still succeeds and writes a header-only CSV. */
    @Test
    public void exportMetadataSearchInvalidDiscoveryQueryTest() throws Exception {
        int result = runDSpaceScript("metadata-export-search", "-q", "blabla", "-n", filename);
        assertEquals(0, result);
        Item[] items = {};
        checkItemsPresentInFile(filename, items);
    }

    /** A filter matching nothing still succeeds and writes a header-only CSV. */
    @Test
    public void exportMetadataSearchNoResultsTest() throws Exception {
        int result = runDSpaceScript(
            "metadata-export-search", "-f", "subject,equals=notExistingSubject", "-n", filename
        );
        assertEquals(0, result);
        Item[] items = {};
        checkItemsPresentInFile(filename, items);
    }

    /** An unknown facet name is reported through the handler, not by crashing the script. */
    @Test
    public void exportMetadataSearchNonExistinFacetsTest() throws Exception {
        TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
        String[] args = new String[] {"metadata-export-search", "-f", "nonExisting,equals=" + subject1, "-f",
            "title,equals=" + String.format("%s item %d", subject1, 0), "-n", filename};
        int result = ScriptLauncher.handleScript(
            args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl
        );
        assertEquals(0, result); // exception should be handled, so the script should finish with 0
        Exception exception = testDSpaceRunnableHandler.getException();
        assertNotNull(exception);
        assertEquals("nonExisting is not a valid search filter", exception.getMessage());
    }
}

View File

@@ -0,0 +1,154 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.solrdatabaseresync;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD;
import static org.dspace.discovery.indexobject.ItemIndexFactoryImpl.STATUS_FIELD_PREDB;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.discovery.MockSolrSearchCore;
import org.dspace.kernel.ServiceManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.junit.Before;
import org.junit.Test;
public class SolrDatabaseResyncIT extends AbstractIntegrationTestWithDatabase {
private final ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
private final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
private MockSolrSearchCore searchService;
private Collection col;
private Item item1;
private Item item2;
@Before
public void setUp() throws Exception {
super.setUp();
configurationService.setProperty("solr-database-resync.time-until-reindex", 1);
ServiceManager serviceManager = DSpaceServicesFactory.getInstance().getServiceManager();
searchService = serviceManager.getServiceByName(null, MockSolrSearchCore.class);
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build();
col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build();
item1 = ItemBuilder.createItem(context, col)
.withTitle("Public item 1")
.withIssueDate("2010-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
item2 = ItemBuilder.createItem(context, col)
.withTitle("Public item 2")
.withIssueDate("2011-08-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
context.setDispatcher("noindex");
}
@Test
public void solrPreDBStatusExistingItemTest() throws Exception {
// Items were created, they should contain a predb status in solr
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
performSolrDatabaseResyncScript();
// Database status script was performed, their predb status should be removed
assertHasNoPreDBStatus(item1);
assertHasNoPreDBStatus(item2);
context.restoreAuthSystemState();
}
@Test
public void solrPreDBStatusRemovedItemTest() throws Exception {
// Items were created, they should contain a predb status in solr
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
collectionService.delete(context, col);
// Items were deleted, they should still contain a predb status in solr for now
assertHasPreDBStatus(item1);
assertHasPreDBStatus(item2);
performSolrDatabaseResyncScript();
// Database status script was performed, their solr document should have been removed
assertNoSolrDocument(item1);
assertNoSolrDocument(item2);
context.restoreAuthSystemState();
}
public void assertHasNoPreDBStatus(Item item) throws Exception {
assertNotEquals(STATUS_FIELD_PREDB, getStatus(item));
}
public void assertHasPreDBStatus(Item item) throws Exception {
assertEquals(STATUS_FIELD_PREDB, getStatus(item));
}
public void assertNoSolrDocument(Item item) throws Exception {
SolrDocumentList solrDocumentList = getSolrDocumentList(item);
assertEquals(0, solrDocumentList.size());
}
public String getStatus(Item item) throws Exception {
SolrDocumentList solrDocumentList = getSolrDocumentList(item);
List fieldValues = ((List) solrDocumentList.get(0).getFieldValues(STATUS_FIELD));
if (CollectionUtils.isNotEmpty(fieldValues)) {
return (String) fieldValues.get(0);
} else {
return null;
}
}
public SolrDocumentList getSolrDocumentList(Item item) throws Exception {
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery("search.resourceid:" + item.getID());
QueryResponse queryResponse = searchService.getSolr().query(solrQuery);
return queryResponse.getResults();
}
public void performSolrDatabaseResyncScript() throws Exception {
String[] args = new String[] {"solr-database-resync"};
TestDSpaceRunnableHandler testDSpaceRunnableHandler = new TestDSpaceRunnableHandler();
ScriptLauncher
.handleScript(args, ScriptLauncher.getConfig(kernelImpl), testDSpaceRunnableHandler, kernelImpl);
}
}

View File

@@ -13,6 +13,7 @@ import java.util.List;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.app.requestitem.factory.RequestItemServiceFactory; import org.dspace.app.requestitem.factory.RequestItemServiceFactory;
import org.dspace.app.requestitem.service.RequestItemService; import org.dspace.app.requestitem.service.RequestItemService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -45,6 +46,7 @@ import org.dspace.eperson.service.RegistrationDataService;
import org.dspace.scripts.factory.ScriptServiceFactory; import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService; import org.dspace.scripts.service.ProcessService;
import org.dspace.services.factory.DSpaceServicesFactory; import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.utils.DSpace;
import org.dspace.versioning.factory.VersionServiceFactory; import org.dspace.versioning.factory.VersionServiceFactory;
import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersionHistoryService;
import org.dspace.versioning.service.VersioningService; import org.dspace.versioning.service.VersioningService;
@@ -95,6 +97,7 @@ public abstract class AbstractBuilder<T, S> {
static ProcessService processService; static ProcessService processService;
static RequestItemService requestItemService; static RequestItemService requestItemService;
static VersioningService versioningService; static VersioningService versioningService;
static OrcidTokenService orcidTokenService;
protected Context context; protected Context context;
@@ -151,6 +154,7 @@ public abstract class AbstractBuilder<T, S> {
inProgressUserService = XmlWorkflowServiceFactory.getInstance().getInProgressUserService(); inProgressUserService = XmlWorkflowServiceFactory.getInstance().getInProgressUserService();
poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService(); poolTaskService = XmlWorkflowServiceFactory.getInstance().getPoolTaskService();
workflowItemRoleService = XmlWorkflowServiceFactory.getInstance().getWorkflowItemRoleService(); workflowItemRoleService = XmlWorkflowServiceFactory.getInstance().getWorkflowItemRoleService();
orcidTokenService = new DSpace().getSingletonService(OrcidTokenService.class);
} }
@@ -183,6 +187,7 @@ public abstract class AbstractBuilder<T, S> {
processService = null; processService = null;
requestItemService = null; requestItemService = null;
versioningService = null; versioningService = null;
orcidTokenService = null;
} }

View File

@@ -63,7 +63,7 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
final String qualifier, final String qualifier,
final String value) { final String value) {
try { try {
getService().addMetadata(context, dso, schema, element, qualifier, Item.ANY, value); getService().addMetadata(context, dso, schema, element, qualifier, null, value);
} catch (Exception e) { } catch (Exception e) {
return handleException(e); return handleException(e);
} }

View File

@@ -129,6 +129,16 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
return this; return this;
} }
public EPersonBuilder withOrcid(final String orcid) {
setMetadataSingleValue(ePerson, "eperson", "orcid", null, orcid);
return this;
}
public EPersonBuilder withOrcidScope(final String scope) {
addMetadataValue(ePerson, "eperson", "orcid", "scope", scope);
return this;
}
public static void deleteEPerson(UUID uuid) throws SQLException, IOException { public static void deleteEPerson(UUID uuid) throws SQLException, IOException {
try (Context c = new Context()) { try (Context c = new Context()) {
c.turnOffAuthorisationSystem(); c.turnOffAuthorisationSystem();

View File

@@ -158,6 +158,30 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED); return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED);
} }
public ItemBuilder withOrcidIdentifier(String orcid) {
return addMetadataValue(item, "person", "identifier", "orcid", orcid);
}
public ItemBuilder withOrcidAccessToken(String accessToken, EPerson owner) {
try {
OrcidTokenBuilder.create(context, owner, accessToken)
.withProfileItem(item)
.build();
} catch (SQLException | AuthorizeException e) {
throw new RuntimeException(e);
}
return this;
}
public ItemBuilder withOrcidAuthenticated(String authenticated) {
return addMetadataValue(item, "dspace", "orcid", "authenticated", authenticated);
}
public ItemBuilder makeUnDiscoverable() { public ItemBuilder makeUnDiscoverable() {
item.setDiscoverable(false); item.setDiscoverable(false);
return this; return this;

View File

@@ -0,0 +1,76 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.builder;
import java.sql.SQLException;
import org.dspace.app.orcid.OrcidToken;
import org.dspace.app.orcid.service.OrcidTokenService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
/**
 * Test-data builder for {@link OrcidToken} entities. The token row is persisted
 * as soon as the builder is created and is removed again during test cleanup.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 *
 */
public class OrcidTokenBuilder extends AbstractBuilder<OrcidToken, OrcidTokenService> {

    // Entity under construction; already persisted by create()
    private OrcidToken orcidToken;

    protected OrcidTokenBuilder(Context context) {
        super(context);
    }

    /**
     * Persists a new ORCID token bound to the given EPerson and returns a
     * builder wrapping it.
     *
     * @param context     the DSpace context
     * @param ePerson     the EPerson owning the token
     * @param accessToken the ORCID access token value
     * @return a builder holding the newly created entity
     */
    public static OrcidTokenBuilder create(Context context, EPerson ePerson, String accessToken) {
        OrcidTokenBuilder builder = new OrcidTokenBuilder(context);
        builder.orcidToken = orcidTokenService.create(context, ePerson, accessToken);
        return builder;
    }

    /**
     * Links the token to the given researcher-profile item.
     */
    public OrcidTokenBuilder withProfileItem(Item profileItem) {
        orcidToken.setProfileItem(profileItem);
        return this;
    }

    @Override
    public OrcidToken build() throws SQLException, AuthorizeException {
        // already persisted in create(); nothing further to flush
        return orcidToken;
    }

    @Override
    public void delete(Context c, OrcidToken orcidToken) throws Exception {
        orcidTokenService.delete(c, orcidToken);
    }

    @Override
    public void cleanup() throws Exception {
        try (Context cleanupContext = new Context()) {
            cleanupContext.setDispatcher("noindex");
            cleanupContext.turnOffAuthorisationSystem();
            orcidToken = cleanupContext.reloadEntity(orcidToken);
            if (orcidToken == null) {
                // already removed by the test itself — nothing to do
                return;
            }
            delete(cleanupContext, orcidToken);
            cleanupContext.complete();
        }
    }

    @Override
    protected OrcidTokenService getService() {
        return orcidTokenService;
    }
}

View File

@@ -12,11 +12,13 @@ import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
import java.text.SimpleDateFormat; import java.text.SimpleDateFormat;
import java.util.List; import java.util.List;
import java.util.Set;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.ProcessStatus; import org.dspace.content.ProcessStatus;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.DSpaceCommandLineParameter;
import org.dspace.scripts.Process; import org.dspace.scripts.Process;
import org.dspace.scripts.service.ProcessService; import org.dspace.scripts.service.ProcessService;
@@ -33,14 +35,22 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
List<DSpaceCommandLineParameter> parameters) List<DSpaceCommandLineParameter> parameters)
throws SQLException { throws SQLException {
ProcessBuilder processBuilder = new ProcessBuilder(context); ProcessBuilder processBuilder = new ProcessBuilder(context);
return processBuilder.create(context, ePerson, scriptName, parameters); return processBuilder.create(context, ePerson, scriptName, parameters, null);
}
public static ProcessBuilder createProcess(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters,
Set<Group> specialGroups)
throws SQLException {
ProcessBuilder processBuilder = new ProcessBuilder(context);
return processBuilder.create(context, ePerson, scriptName, parameters, specialGroups);
} }
private ProcessBuilder create(Context context, EPerson ePerson, String scriptName, private ProcessBuilder create(Context context, EPerson ePerson, String scriptName,
List<DSpaceCommandLineParameter> parameters) List<DSpaceCommandLineParameter> parameters, final Set<Group> specialGroups)
throws SQLException { throws SQLException {
this.context = context; this.context = context;
this.process = processService.create(context, ePerson, scriptName, parameters); this.process = processService.create(context, ePerson, scriptName, parameters, specialGroups);
this.process.setProcessStatus(ProcessStatus.SCHEDULED); this.process.setProcessStatus(ProcessStatus.SCHEDULED);
return this; return this;
} }

View File

@@ -25,6 +25,7 @@ import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ItemBuilder; import org.dspace.builder.ItemBuilder;
import org.dspace.builder.MetadataFieldBuilder; import org.dspace.builder.MetadataFieldBuilder;
import org.dspace.builder.MetadataSchemaBuilder; import org.dspace.builder.MetadataSchemaBuilder;
import org.dspace.builder.OrcidTokenBuilder;
import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.PoolTaskBuilder;
import org.dspace.builder.ProcessBuilder; import org.dspace.builder.ProcessBuilder;
import org.dspace.builder.RelationshipBuilder; import org.dspace.builder.RelationshipBuilder;
@@ -56,6 +57,7 @@ public class AbstractBuilderCleanupUtil {
} }
private void initMap() { private void initMap() {
map.put(OrcidTokenBuilder.class.getName(), new ArrayList<>());
map.put(ResourcePolicyBuilder.class.getName(), new ArrayList<>()); map.put(ResourcePolicyBuilder.class.getName(), new ArrayList<>());
map.put(RelationshipBuilder.class.getName(), new ArrayList<>()); map.put(RelationshipBuilder.class.getName(), new ArrayList<>());
map.put(RequestItemBuilder.class.getName(), new ArrayList<>()); map.put(RequestItemBuilder.class.getName(), new ArrayList<>());

View File

@@ -8,13 +8,18 @@
package org.dspace.discovery; package org.dspace.discovery;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List; import java.util.List;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import org.dspace.AbstractIntegrationTestWithDatabase; import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.app.launcher.ScriptLauncher;
import org.dspace.app.scripts.handler.impl.TestDSpaceRunnableHandler;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.ClaimedTaskBuilder;
import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CollectionBuilder;
@@ -86,6 +91,12 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance() MetadataAuthorityService metadataAuthorityService = ContentAuthorityServiceFactory.getInstance()
.getMetadataAuthorityService(); .getMetadataAuthorityService();
@Override
public void setUp() throws Exception {
    super.setUp();
    // Lower the resync delay to 1 (smallest positive value; units per the
    // solr-database-resync configuration — presumably milliseconds, TODO confirm)
    // so SolrDatabaseResyncCli acts on "preDB" records immediately in these tests.
    configurationService.setProperty("solr-database-resync.time-until-reindex", 1);
}
@Test @Test
public void solrRecordsAfterDepositOrDeletionOfWorkspaceItemTest() throws Exception { public void solrRecordsAfterDepositOrDeletionOfWorkspaceItemTest() throws Exception {
context.turnOffAuthorisationSystem(); context.turnOffAuthorisationSystem();
@@ -371,7 +382,8 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
collectionService.delete(context, col1); collectionService.delete(context, col1);
context.restoreAuthSystemState(); context.restoreAuthSystemState();
assertSearchQuery(IndexableCollection.TYPE, 2); assertSearchQuery(IndexableCollection.TYPE, 2);
assertSearchQuery(IndexableItem.TYPE, 2); // Deleted item contained within totalFound due to predb status (SolrDatabaseResyncCli takes care of this)
assertSearchQuery(IndexableItem.TYPE, 2, 3, 0, -1);
} }
@Test @Test
@@ -453,6 +465,10 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1); assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1);
// check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6 // check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2); assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2);
// Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items
performSolrDatabaseResyncScript();
// check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3 // check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3
assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4); assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4);
// check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3 // check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
@@ -639,16 +655,85 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
// check Item type with start=0 and limit=default, // check Item type with start=0 and limit=default,
// we expect: indexableObjects=3, totalFound=6 (3 stale objects here) // we expect: indexableObjects=3, totalFound=6 (3 stale objects here)
assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1); assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1);
// as the previous query hit the stale objects running a new query should lead to a clean situation
// Run SolrDatabaseResyncCli, updating items with "preDB" status and removing stale items
performSolrDatabaseResyncScript();
// as SolrDatabaseResyncCli removed the stale objects, running a new query should lead to a clean situation
assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1); assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1);
} }
@Test
public void iteratorSearchServiceTest() throws SearchServiceException {
    // Two subjects with a known number of matching items each; a third item with
    // subject1 lives in a different collection to verify scope filtering.
    String subject1 = "subject1";
    String subject2 = "subject2";
    int numberItemsSubject1 = 30;
    int numberItemsSubject2 = 2;
    Item[] itemsSubject1 = new Item[numberItemsSubject1];
    Item[] itemsSubject2 = new Item[numberItemsSubject2];

    context.turnOffAuthorisationSystem();
    Community community = CommunityBuilder.createCommunity(context).build();
    Collection collection = CollectionBuilder.createCollection(context, community).build();
    for (int i = 0; i < numberItemsSubject1; i++) {
        itemsSubject1[i] = ItemBuilder.createItem(context, collection)
                                      .withTitle("item subject 1 number" + i)
                                      .withSubject(subject1)
                                      .build();
    }
    for (int i = 0; i < numberItemsSubject2; i++) {
        itemsSubject2[i] = ItemBuilder.createItem(context, collection)
                                      .withTitle("item subject 2 number " + i)
                                      .withSubject(subject2)
                                      .build();
    }
    Collection collection2 = CollectionBuilder.createCollection(context, community).build();
    ItemBuilder.createItem(context, collection2)
               .withTitle("item collection2")
               .withSubject(subject1)
               .build();
    context.restoreAuthSystemState();

    // Scoped search: only subject1 items inside the first collection are expected.
    DiscoverQuery discoverQuery = new DiscoverQuery();
    discoverQuery.addFilterQueries("subject:" + subject1);
    Iterator<Item> itemIterator =
        searchService.iteratorSearch(context, new IndexableCollection(collection), discoverQuery);
    List<Item> foundItems = new ArrayList<>();
    while (itemIterator.hasNext()) {
        foundItems.add(itemIterator.next());
    }
    for (Item item : itemsSubject1) {
        assertTrue(foundItems.contains(item));
    }
    assertEquals(numberItemsSubject1, foundItems.size());

    // Unscoped search: all subject2 items are expected regardless of collection.
    discoverQuery = new DiscoverQuery();
    discoverQuery.addFilterQueries("subject:" + subject2);
    itemIterator = searchService.iteratorSearch(context, null, discoverQuery);
    foundItems = new ArrayList<>();
    while (itemIterator.hasNext()) {
        foundItems.add(itemIterator.next());
    }
    assertEquals(numberItemsSubject2, foundItems.size());
    for (Item item : itemsSubject2) {
        assertTrue(foundItems.contains(item));
    }
}
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1); assertSearchQuery(resourceType, size, size, 0, -1);
} }
private void assertSearchQuery(String resourceType, int size, int totalFound, int start, int limit) private void assertSearchQuery(String resourceType, int size, int totalFound, int start, int limit)
throws SearchServiceException { throws SearchServiceException {
DiscoverQuery discoverQuery = new DiscoverQuery(); DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setQuery("*:*"); discoverQuery.setQuery("*:*");
discoverQuery.setStart(start); discoverQuery.setStart(start);
@@ -739,6 +824,13 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
context.setCurrentUser(previousUser); context.setCurrentUser(previousUser);
} }
/**
 * Run the {@code solr-database-resync} script through the script launcher, which
 * updates items flagged "preDB" and removes stale Solr documents.
 */
public void performSolrDatabaseResyncScript() throws Exception {
    TestDSpaceRunnableHandler handler = new TestDSpaceRunnableHandler();
    String[] args = {"solr-database-resync"};
    ScriptLauncher.handleScript(args, ScriptLauncher.getConfig(kernelImpl), handler, kernelImpl);
}
private void abort(XmlWorkflowItem workflowItem) private void abort(XmlWorkflowItem workflowItem)
throws SQLException, AuthorizeException, IOException, SearchServiceException { throws SQLException, AuthorizeException, IOException, SearchServiceException {
final EPerson previousUser = context.getCurrentUser(); final EPerson previousUser = context.getCurrentUser();

View File

@@ -5,7 +5,7 @@
* *
* http://www.dspace.org/license/ * http://www.dspace.org/license/
*/ */
package org.dspace.app.rest.utils; package org.dspace.discovery.utils;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static org.dspace.discovery.configuration.DiscoveryConfigurationParameters.SORT.COUNT; import static org.dspace.discovery.configuration.DiscoveryConfigurationParameters.SORT.COUNT;
@@ -16,10 +16,10 @@ import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyInt;
@@ -35,9 +35,6 @@ import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.function.Function; import java.util.function.Function;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.InvalidSearchRequestException;
import org.dspace.app.rest.parameter.SearchFilter;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.discovery.DiscoverFacetField; import org.dspace.discovery.DiscoverFacetField;
import org.dspace.discovery.DiscoverFilterQuery; import org.dspace.discovery.DiscoverFilterQuery;
@@ -45,6 +42,7 @@ import org.dspace.discovery.DiscoverHitHighlightingField;
import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.FacetYearRange; import org.dspace.discovery.FacetYearRange;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SolrServiceImpl; import org.dspace.discovery.SolrServiceImpl;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationParameters; import org.dspace.discovery.configuration.DiscoveryConfigurationParameters;
@@ -56,6 +54,7 @@ import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration;
import org.dspace.discovery.configuration.HierarchicalSidebarFacetConfiguration; import org.dspace.discovery.configuration.HierarchicalSidebarFacetConfiguration;
import org.dspace.discovery.indexobject.IndexableItem; import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.factory.IndexFactory; import org.dspace.discovery.indexobject.factory.IndexFactory;
import org.dspace.discovery.utils.parameter.QueryBuilderSearchFilter;
import org.dspace.services.ConfigurationService; import org.dspace.services.ConfigurationService;
import org.hamcrest.FeatureMatcher; import org.hamcrest.FeatureMatcher;
import org.hamcrest.Matcher; import org.hamcrest.Matcher;
@@ -65,8 +64,7 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks; import org.mockito.InjectMocks;
import org.mockito.Mock; import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner; import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Sort;
/** /**
* Unit tests for {@link DiscoverQueryBuilder} * Unit tests for {@link DiscoverQueryBuilder}
@@ -94,8 +92,14 @@ public class DiscoverQueryBuilderTest {
private DiscoveryConfiguration discoveryConfiguration; private DiscoveryConfiguration discoveryConfiguration;
private String query; private String query;
private SearchFilter searchFilter;
private PageRequest page; private int pageSize = 10;
private long offset = 10;
private String sortProperty = "dc.title";
private String sortDirection = "ASC";
private QueryBuilderSearchFilter searchFilter;
@Before @Before
public void setUp() throws Exception { public void setUp() throws Exception {
@@ -106,33 +110,35 @@ public class DiscoverQueryBuilderTest {
when(configurationService.getIntProperty(eq("rest.search.max.results"), anyInt())).thenReturn(100); when(configurationService.getIntProperty(eq("rest.search.max.results"), anyInt())).thenReturn(100);
when(searchService.toSortFieldIndex(any(String.class), any(String.class))) when(searchService.toSortFieldIndex(any(String.class), any(String.class)))
.then(invocation -> invocation.getArguments()[0] + "_sort"); .then(invocation -> invocation.getArguments()[0] + "_sort");
when(searchService when(searchService
.getFacetYearRange(eq(context), nullable(IndexableObject.class), any(DiscoverySearchFilterFacet.class), .getFacetYearRange(eq(context), nullable(IndexableObject.class),
any(), any(DiscoverQuery.class))) any(DiscoverySearchFilterFacet.class),
.then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2])); any(), any(DiscoverQuery.class)))
.then(invocation -> new FacetYearRange((DiscoverySearchFilterFacet) invocation.getArguments()[2]));
when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class),
any(DiscoveryConfiguration.class))) any(DiscoveryConfiguration.class)))
.then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1], .then(invocation -> new DiscoverFilterQuery((String) invocation.getArguments()[1],
invocation.getArguments()[1] + ":\"" + invocation.getArguments()[3] + "\"", invocation.getArguments()[1] + ":\"" + invocation
(String) invocation.getArguments()[3])); .getArguments()[3] + "\"",
(String) invocation.getArguments()[3]));
discoveryConfiguration = new DiscoveryConfiguration(); discoveryConfiguration = new DiscoveryConfiguration();
discoveryConfiguration.setDefaultFilterQueries(Arrays.asList("archived:true")); discoveryConfiguration.setDefaultFilterQueries(Arrays.asList("archived:true"));
DiscoveryHitHighlightingConfiguration discoveryHitHighlightingConfiguration = DiscoveryHitHighlightingConfiguration discoveryHitHighlightingConfiguration =
new DiscoveryHitHighlightingConfiguration(); new DiscoveryHitHighlightingConfiguration();
List<DiscoveryHitHighlightFieldConfiguration> discoveryHitHighlightFieldConfigurations = new LinkedList<>(); List<DiscoveryHitHighlightFieldConfiguration> discoveryHitHighlightFieldConfigurations = new LinkedList<>();
DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration = DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration =
new DiscoveryHitHighlightFieldConfiguration(); new DiscoveryHitHighlightFieldConfiguration();
discoveryHitHighlightFieldConfiguration.setField("dc.title"); discoveryHitHighlightFieldConfiguration.setField("dc.title");
DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration1 = DiscoveryHitHighlightFieldConfiguration discoveryHitHighlightFieldConfiguration1 =
new DiscoveryHitHighlightFieldConfiguration(); new DiscoveryHitHighlightFieldConfiguration();
discoveryHitHighlightFieldConfiguration1.setField("fulltext"); discoveryHitHighlightFieldConfiguration1.setField("fulltext");
discoveryHitHighlightFieldConfigurations.add(discoveryHitHighlightFieldConfiguration1); discoveryHitHighlightFieldConfigurations.add(discoveryHitHighlightFieldConfiguration1);
@@ -177,9 +183,8 @@ public class DiscoverQueryBuilderTest {
discoveryConfiguration.setSidebarFacets(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); discoveryConfiguration.setSidebarFacets(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet));
discoveryConfiguration.setSearchFilters(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet)); discoveryConfiguration.setSearchFilters(Arrays.asList(subjectFacet, dateFacet, hierarchyFacet));
searchFilter = new QueryBuilderSearchFilter("subject", "equals", "Java");
query = "my test case"; query = "my test case";
searchFilter = new SearchFilter("subject", "equals", "Java");
page = PageRequest.of(1, 10, Sort.Direction.ASC, "dc.title");
queryBuilder.afterPropertiesSet(); queryBuilder.afterPropertiesSet();
} }
@@ -188,7 +193,8 @@ public class DiscoverQueryBuilderTest {
public void testBuildQuery() throws Exception { public void testBuildQuery() throws Exception {
DiscoverQuery discoverQuery = queryBuilder DiscoverQuery discoverQuery = queryBuilder
.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "item", page); .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
"item", pageSize, offset, sortProperty, sortDirection);
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\""));
assertThat(discoverQuery.getQuery(), is(query)); assertThat(discoverQuery.getQuery(), is(query));
@@ -214,10 +220,11 @@ public class DiscoverQueryBuilderTest {
@Test @Test
public void testBuildQueryDefaults() throws Exception { public void testBuildQueryDefaults() throws Exception {
DiscoverQuery discoverQuery = DiscoverQuery discoverQuery =
queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null); queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), null, null,
null, null);
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true"));
assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); assertThat(discoverQuery.getQuery(), isEmptyOrNullString());
assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty()));
//Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid
// mock // mock
@@ -241,13 +248,12 @@ public class DiscoverQueryBuilderTest {
@Test @Test
public void testSortByScore() throws Exception { public void testSortByScore() throws Exception {
page = PageRequest.of(2, 10, Sort.Direction.ASC, "SCORE");
DiscoverQuery discoverQuery = DiscoverQuery discoverQuery =
queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); queryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), 10, 20L,
"SCORE", "ASC");
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true")); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true"));
assertThat(discoverQuery.getQuery(), is(emptyOrNullString())); assertThat(discoverQuery.getQuery(), isEmptyOrNullString());
assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty())); assertThat(discoverQuery.getDSpaceObjectFilters(), is(empty()));
//Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid //Note this should actually be "dc.date.accessioned_dt" but remember that our searchService is just a stupid
// mock // mock
@@ -269,48 +275,50 @@ public class DiscoverQueryBuilderTest {
)); ));
} }
@Test(expected = DSpaceBadRequestException.class) @Test(expected = IllegalArgumentException.class)
public void testInvalidDSOType() throws Exception { public void testInvalidDSOType() throws Exception {
queryBuilder queryBuilder
.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "TEST", page); .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
"TEST", pageSize, offset, sortProperty, sortDirection);
} }
@Test(expected = InvalidSearchRequestException.class) @Test(expected = SearchServiceException.class)
public void testInvalidSortField() throws Exception { public void testInvalidSortField() throws Exception {
page = PageRequest.of(2, 10, Sort.Direction.ASC, "test");
queryBuilder queryBuilder
.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); .buildQuery(context, scope, discoveryConfiguration, query, Collections.singletonList(searchFilter),
"ITEM", pageSize, 20L, "test", sortDirection);
} }
@Test(expected = DSpaceBadRequestException.class) @Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFilter1() throws Exception { public void testInvalidSearchFilter1() throws Exception {
searchFilter = new SearchFilter("test", "equals", "Smith, Donald"); searchFilter = new QueryBuilderSearchFilter("test", "equals", "Smith, Donald");
queryBuilder queryBuilder
.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM",
pageSize, offset, sortProperty, sortDirection);
} }
@Test(expected = DSpaceBadRequestException.class) @Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFilter2() throws Exception { public void testInvalidSearchFilter2() throws Exception {
when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class), when(searchService.toFilterQuery(any(Context.class), any(String.class), any(String.class), any(String.class),
any(DiscoveryConfiguration.class))) any(DiscoveryConfiguration.class)))
.thenThrow(SQLException.class); .thenThrow(SQLException.class);
queryBuilder queryBuilder
.buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM", page); .buildQuery(context, scope, discoveryConfiguration, query, Arrays.asList(searchFilter), "ITEM",
pageSize, offset, sortProperty, sortDirection);
} }
@Test @Test
public void testBuildFacetQuery() throws Exception { public void testBuildFacetQuery() throws Exception {
DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, "prefix",
"prefix", query, query, Collections.singletonList(searchFilter),
Arrays.asList(searchFilter), "item", page, "item", pageSize, offset, "subject");
"subject");
assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\"")); assertThat(discoverQuery.getFilterQueries(), containsInAnyOrder("archived:true", "subject:\"Java\""));
assertThat(discoverQuery.getQuery(), is(query)); assertThat(discoverQuery.getQuery(), is(query));
assertThat(discoverQuery.getDSpaceObjectFilters(), contains(IndexableItem.TYPE)); assertThat(discoverQuery.getDSpaceObjectFilters(), contains(IndexableItem.TYPE));
assertThat(discoverQuery.getSortField(), is(emptyOrNullString())); assertThat(discoverQuery.getSortField(), isEmptyOrNullString());
assertThat(discoverQuery.getMaxResults(), is(0)); assertThat(discoverQuery.getMaxResults(), is(0));
assertThat(discoverQuery.getStart(), is(0)); assertThat(discoverQuery.getStart(), is(0));
assertThat(discoverQuery.getFacetMinCount(), is(1)); assertThat(discoverQuery.getFacetMinCount(), is(1));
@@ -321,10 +329,10 @@ public class DiscoverQueryBuilderTest {
)); ));
} }
@Test(expected = DSpaceBadRequestException.class) @Test(expected = IllegalArgumentException.class)
public void testInvalidSearchFacet() throws Exception { public void testInvalidSearchFacet() throws Exception {
queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, null, query, queryBuilder.buildFacetQuery(context, scope, discoveryConfiguration, null, query,
Arrays.asList(searchFilter), "item", page, "test"); Collections.singletonList(searchFilter), "item", pageSize, offset, "test");
} }
public Matcher<DiscoverFacetField> discoverFacetFieldMatcher(DiscoverFacetField expected) { public Matcher<DiscoverFacetField> discoverFacetFieldMatcher(DiscoverFacetField expected) {

View File

@@ -1,87 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.external.provider.impl;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.util.FileCopyUtils;
/**
* we override the init method to mock the rest call to pubmed the following
* mock definitions will allow to answer to efetch or esearch requests using the
* test resource files (pubmed-esearch.fcgi.xml or pubmed-efetch.fcgi.xml)
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
*/
public class MockPubmedImportMetadataSourceServiceImpl extends PubmedImportMetadataSourceServiceImpl {

    @Override
    public void init() throws Exception {
        pubmedWebTarget = Mockito.mock(WebTarget.class);
        // Captures the last path segment requested (e.g. "esearch.fcgi" or "efetch.fcgi")
        // so the stubbed response can be served from the matching test resource file.
        ArgumentCaptor<String> pathCaptor = ArgumentCaptor.forClass(String.class);
        // queryParam() and path() are fluent: always return the same mocked target.
        when(pubmedWebTarget.queryParam(ArgumentMatchers.any(), ArgumentMatchers.any()))
            .thenAnswer(invocation -> pubmedWebTarget);
        when(pubmedWebTarget.path(pathCaptor.capture()))
            .thenAnswer(invocation -> pubmedWebTarget);
        // request() yields a builder whose get() returns a Response that reads its body
        // from "pubmed-<captured path>.xml" on the test classpath.
        when(pubmedWebTarget.request(ArgumentMatchers.any(MediaType.class)))
            .thenAnswer(invocation -> {
                Invocation.Builder builder = Mockito.mock(Invocation.Builder.class);
                when(builder.get()).thenAnswer(getInvocation -> {
                    Response response = Mockito.mock(Response.class);
                    when(response.readEntity(ArgumentMatchers.eq(String.class))).then(readInvocation -> {
                        String resourceName = "pubmed-" + pathCaptor.getValue() + ".xml";
                        InputStream resource = getClass().getResourceAsStream(resourceName);
                        try (Reader reader = new InputStreamReader(resource, StandardCharsets.UTF_8)) {
                            return FileCopyUtils.copyToString(reader);
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                    });
                    return response;
                });
                return builder;
            });
    }
}

View File

@@ -0,0 +1,90 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.process;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.dspace.AbstractIntegrationTestWithDatabase;
import org.dspace.builder.GroupBuilder;
import org.dspace.builder.ProcessBuilder;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.dspace.scripts.Process;
import org.dspace.scripts.factory.ScriptServiceFactory;
import org.dspace.scripts.service.ProcessService;
import org.junit.Test;
/**
 * This class will aim to test Process related use cases, in particular the
 * relation between a {@link Process} and the {@link Group}s granted access
 * to it.
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
 */
public class ProcessIT extends AbstractIntegrationTestWithDatabase {

    // Services under test, obtained from the DSpace service factories.
    protected ProcessService processService = ScriptServiceFactory.getInstance().getProcessService();
    protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();

    /**
     * A group supplied at process-creation time must be retrievable again on
     * the persisted process via the ProcessService.
     */
    @Test
    public void checkProcessGroupsTest() throws Exception {
        context.turnOffAuthorisationSystem();
        Group groupA = GroupBuilder.createGroup(context)
            .withName("Group A")
            .addMember(admin)
            .build();

        Set<Group> groupSet = new HashSet<>();
        groupSet.add(groupA);

        Process processA = ProcessBuilder.createProcess(context, admin, "mock-script",
            new LinkedList<>(),
            groupSet).build();
        context.restoreAuthSystemState();

        Process process = processService.find(context, processA.getID());
        List<Group> groups = process.getGroups();
        boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupA.getID()));
        assertTrue(isPresent);
    }

    /**
     * Deleting a group must also remove it from the groups associated with an
     * existing process.
     */
    @Test
    public void removeOneGroupTest() throws Exception {
        context.turnOffAuthorisationSystem();
        Group groupA = GroupBuilder.createGroup(context)
            .withName("Group A")
            .addMember(admin).build();

        Set<Group> groupSet = new HashSet<>();
        groupSet.add(groupA);
        // Remember the uuid: the Group entity itself is deleted below.
        UUID groupUuid = groupA.getID();

        Process processA = ProcessBuilder.createProcess(context, admin, "mock-script", new LinkedList<>(),
            groupSet).build();
        context.restoreAuthSystemState();

        groupService.delete(context, groupA);
        context.commit();
        // NOTE(review): the return value of this reload is discarded;
        // presumably the call only serves to evict the stale entity from the
        // session — confirm.
        context.reloadEntity(groupA);
        processA = context.reloadEntity(processA);

        Process process = processService.find(context, processA.getID());
        List<Group> groups = process.getGroups();
        boolean isPresent = groups.stream().anyMatch(g -> g.getID().equals(groupUuid));
        assertFalse(isPresent);
    }
}

View File

@@ -12,4 +12,4 @@ test.folder = ./target/testing/
# Path of the test bitstream (to use in BitstreamTest and elsewhere)
test.bitstream = ./target/testing/dspace/assetstore/ConstitutionofIreland.pdf
test.exportcsv = ./target/testing/dspace/assetstore/test.csv
test.importcsv = ./target/testing/dspace/assetstore/testImport.csv

View File

@@ -20,6 +20,7 @@ import org.dspace.app.rest.utils.DSpaceAPIRequestLoggingFilter;
import org.dspace.app.rest.utils.DSpaceConfigurationInitializer; import org.dspace.app.rest.utils.DSpaceConfigurationInitializer;
import org.dspace.app.rest.utils.DSpaceKernelInitializer; import org.dspace.app.rest.utils.DSpaceKernelInitializer;
import org.dspace.app.sitemap.GenerateSitemaps; import org.dspace.app.sitemap.GenerateSitemaps;
import org.dspace.app.solrdatabaseresync.SolrDatabaseResyncCli;
import org.dspace.app.util.DSpaceContextListener; import org.dspace.app.util.DSpaceContextListener;
import org.dspace.utils.servlet.DSpaceWebappServletFilter; import org.dspace.utils.servlet.DSpaceWebappServletFilter;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -71,6 +72,11 @@ public class Application extends SpringBootServletInitializer {
GenerateSitemaps.generateSitemapsScheduled(); GenerateSitemaps.generateSitemapsScheduled();
} }
@Scheduled(cron = "${solr-database-resync.cron:-}")
public void solrDatabaseResync() throws Exception {
SolrDatabaseResyncCli.runScheduled();
}
/** /**
* Override the default SpringBootServletInitializer.configure() method, * Override the default SpringBootServletInitializer.configure() method,
* passing it this Application class. * passing it this Application class.

View File

@@ -0,0 +1,83 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.authorization.impl;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.authorization.AuthorizationFeature;
import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
 * The synchronization with ORCID feature. It can be used to verify
 * if the user can synchronize with ORCID.
 *
 * @author Mohamed Eskander (mohamed.eskander at 4science.it)
 */
@Component
@AuthorizationFeatureDocumentation(name = CanSynchronizeWithORCID.NAME,
    description = "It can be used to verify if the user can synchronize with ORCID")
public class CanSynchronizeWithORCID implements AuthorizationFeature {

    public static final String NAME = "canSynchronizeWithORCID";

    @Autowired
    private ItemService itemService;

    /**
     * This method returns true if the BaseObjectRest object is an instance of
     * {@link ItemRest}, there is a current user in the {@link Context}, the
     * rest object's id resolves to an existing item and the current user is
     * the owner of that item. Otherwise this method returns false.
     */
    @Override
    public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException {
        EPerson ePerson = context.getCurrentUser();
        if (!(object instanceof ItemRest) || Objects.isNull(ePerson)) {
            return false;
        }
        String id = ((ItemRest) object).getId();
        Item item = itemService.find(context, UUID.fromString(id));
        // Guard against an id that no longer resolves to an item: find(..)
        // returns null in that case and isDspaceObjectOwner would otherwise
        // pass a null item into getMetadataByMetadataString.
        if (Objects.isNull(item)) {
            return false;
        }
        return isDspaceObjectOwner(ePerson, item);
    }

    @Override
    public String[] getSupportedTypes() {
        return new String[] { ItemRest.CATEGORY + "." + ItemRest.NAME };
    }

    /**
     * This method returns true if the given eperson is not null and if the given
     * item has the metadata field dspace.object.owner with an authority equals to
     * the uuid of the eperson. Otherwise this method returns false.
     */
    private boolean isDspaceObjectOwner(EPerson eperson, Item item) {
        if (eperson == null) {
            return false;
        }
        List<MetadataValue> owners = itemService.getMetadataByMetadataString(item, "dspace.object.owner");
        Predicate<MetadataValue> checkOwner = v -> StringUtils.equals(v.getAuthority(), eperson.getID().toString());
        return owners.stream().anyMatch(checkOwner);
    }
}

View File

@@ -7,10 +7,24 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import static org.dspace.app.orcid.model.OrcidEntityType.FUNDING;
import static org.dspace.app.orcid.model.OrcidEntityType.PUBLICATION;
import java.util.List;
import java.util.stream.Collectors;
import org.dspace.app.orcid.service.OrcidSynchronizationService;
import org.dspace.app.profile.OrcidEntitySyncPreference;
import org.dspace.app.profile.OrcidProfileSyncPreference;
import org.dspace.app.profile.OrcidSynchronizationMode;
import org.dspace.app.profile.ResearcherProfile; import org.dspace.app.profile.ResearcherProfile;
import org.dspace.app.rest.model.ResearcherProfileRest; import org.dspace.app.rest.model.ResearcherProfileRest;
import org.dspace.app.rest.model.ResearcherProfileRest.OrcidSynchronizationRest;
import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.projection.Projection;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.web.ContextUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -23,6 +37,9 @@ import org.springframework.stereotype.Component;
@Component @Component
public class ResearcherProfileConverter implements DSpaceConverter<ResearcherProfile, ResearcherProfileRest> { public class ResearcherProfileConverter implements DSpaceConverter<ResearcherProfile, ResearcherProfileRest> {
@Autowired
private OrcidSynchronizationService orcidSynchronizationService;
@Override @Override
public ResearcherProfileRest convert(ResearcherProfile profile, Projection projection) { public ResearcherProfileRest convert(ResearcherProfile profile, Projection projection) {
ResearcherProfileRest researcherProfileRest = new ResearcherProfileRest(); ResearcherProfileRest researcherProfileRest = new ResearcherProfileRest();
@@ -33,12 +50,49 @@ public class ResearcherProfileConverter implements DSpaceConverter<ResearcherPro
Item item = profile.getItem(); Item item = profile.getItem();
Context context = ContextUtil.obtainCurrentRequestContext();
if (orcidSynchronizationService.isLinkedToOrcid(context, item)) {
profile.getOrcid().ifPresent(researcherProfileRest::setOrcid);
OrcidSynchronizationRest orcidSynchronization = new OrcidSynchronizationRest();
orcidSynchronization.setMode(getMode(item));
orcidSynchronization.setProfilePreferences(getProfilePreferences(item));
orcidSynchronization.setFundingsPreference(getFundingsPreference(item));
orcidSynchronization.setPublicationsPreference(getPublicationsPreference(item));
researcherProfileRest.setOrcidSynchronization(orcidSynchronization);
}
return researcherProfileRest; return researcherProfileRest;
} }
private String getPublicationsPreference(Item item) {
    // DISABLED is the fallback when the item carries no explicit preference.
    return orcidSynchronizationService
        .getEntityPreference(item, PUBLICATION)
        .map(preference -> preference.name())
        .orElse(OrcidEntitySyncPreference.DISABLED.name());
}
private String getFundingsPreference(Item item) {
    // DISABLED is the fallback when the item carries no explicit preference.
    return orcidSynchronizationService
        .getEntityPreference(item, FUNDING)
        .map(preference -> preference.name())
        .orElse(OrcidEntitySyncPreference.DISABLED.name());
}
private List<String> getProfilePreferences(Item item) {
    // Names of every profile-level synchronization preference set on the item.
    return orcidSynchronizationService.getProfilePreferences(item)
        .stream()
        .map(preference -> preference.name())
        .collect(Collectors.toList());
}
private String getMode(Item item) {
    // MANUAL is the fallback when no synchronization mode is configured.
    return orcidSynchronizationService
        .getSynchronizationMode(item)
        .map(mode -> mode.name())
        .orElse(OrcidSynchronizationMode.MANUAL.name());
}
@Override @Override
public Class<ResearcherProfile> getModelClass() { public Class<ResearcherProfile> getModelClass() {
return ResearcherProfile.class; return ResearcherProfile.class;
} }
} }

View File

@@ -7,11 +7,11 @@
*/ */
package org.dspace.app.rest.exception; package org.dspace.app.rest.exception;
import org.dspace.app.rest.utils.DiscoverQueryBuilder; import org.dspace.app.rest.utils.RestDiscoverQueryBuilder;
/** /**
* This exception is thrown when the given search configuration * This exception is thrown when the given search configuration
* passed to {@link DiscoverQueryBuilder} is invalid * passed to {@link RestDiscoverQueryBuilder} is invalid
* *
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
*/ */
@@ -25,4 +25,4 @@ public class InvalidSearchRequestException extends RuntimeException {
super(message); super(message);
} }
} }

Some files were not shown because too many files have changed in this diff Show More