Merge branch 'main' into feature-relationship-versioning-contribution

Bruno Roemers committed 2022-06-10 13:25:00 +02:00
219 changed files with 16485 additions and 522 deletions

View File

@@ -31,6 +31,11 @@ jobs:
# We turn off 'latest' tag by default.
TAGS_FLAVOR: |
latest=false
# Architectures / Platforms for which we will build Docker images
# If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work.
# If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH
# longer (around 45mins or so) which is why we only run it when pushing a new Docker image.
PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }}
steps:
# https://github.com/actions/checkout
@@ -41,6 +46,10 @@ jobs:
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v1
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU emulation to build for multiple architectures
uses: docker/setup-qemu-action@v2
# https://github.com/docker/login-action
- name: Login to DockerHub
# Only login if not a PR, as PRs only trigger a Docker build and not a push
@@ -70,6 +79,7 @@ jobs:
with:
context: .
file: ./Dockerfile.dependencies
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -95,6 +105,7 @@ jobs:
with:
context: .
file: ./Dockerfile
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -123,6 +134,7 @@ jobs:
with:
context: .
file: ./Dockerfile.test
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
@@ -148,9 +160,10 @@ jobs:
with:
context: .
file: ./Dockerfile.cli
platforms: ${{ env.PLATFORMS }}
# For pull requests, we run the Docker build (to ensure no PR changes break the build),
# but we ONLY do an image push to DockerHub if it's NOT a PR
push: ${{ github.event_name != 'pull_request' }}
# Use tags / labels provided by 'docker/metadata-action' above
tags: ${{ steps.meta_build_cli.outputs.tags }}
labels: ${{ steps.meta_build_cli.outputs.labels }}

View File

@@ -35,7 +35,7 @@ Documentation for each release may be viewed online or downloaded via our [Docum
The latest DSpace Installation instructions are available at:
https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle)
Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL)
and a servlet container (usually Tomcat) in order to function.
More information about these and all other prerequisites can be found in the Installation instructions above.

View File

@@ -336,7 +336,6 @@
</profiles>
<dependencies>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
@@ -361,6 +360,23 @@
<artifactId>ehcache</artifactId>
<version>${ehcache.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-cache
Caching dependencies for sherpa service. -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
<version>${spring-boot.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-jpamodelgen</artifactId>
@@ -862,6 +878,13 @@
<artifactId>mockserver-junit-rule</artifactId>
<version>5.11.2</version>
<scope>test</scope>
<exclusions>
<!-- Exclude snakeyaml to avoid conflicts with: spring-boot-starter-cache -->
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Plugin interface for the access status calculation.
*/
public interface AccessStatusHelper {
/**
* Calculate the access status for the item.
*
* @param context the DSpace context
* @param item the item
* @param threshold the embargo forever date threshold
* @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getAccessStatusFromItem(Context context, Item item, Date threshold)
throws SQLException;
}
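Because the helper is resolved through the DSpace plugin manager (see the configuration documented on AccessStatusService below), a site can substitute its own calculation. The following is a minimal sketch of a custom implementation, assuming a hypothetical rule that withdrawn items always report as restricted; the class name and the delegation to DefaultAccessStatusHelper are illustrative, not part of this commit.

package org.example.access.status;

import java.sql.SQLException;
import java.util.Date;

import org.dspace.access.status.AccessStatusHelper;
import org.dspace.access.status.DefaultAccessStatusHelper;
import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Hypothetical helper: report withdrawn items as restricted and
 * delegate every other case to the default calculation.
 */
public class WithdrawnAwareAccessStatusHelper implements AccessStatusHelper {

    private final AccessStatusHelper delegate = new DefaultAccessStatusHelper();

    @Override
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException {
        if (item != null && item.isWithdrawn()) {
            return DefaultAccessStatusHelper.RESTRICTED;
        }
        return delegate.getAccessStatusFromItem(context, item, threshold);
    }
}

Such a class would then be registered via the plugin.single.org.dspace.access.status.AccessStatusHelper property in place of the default.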

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation for the access status calculation service.
*/
public class AccessStatusServiceImpl implements AccessStatusService {
// Plugin implementation, set from the DSpace configuration by init().
protected AccessStatusHelper helper = null;
protected Date forever_date = null;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true)
protected PluginService pluginService;
/**
* Initialize the bean (after dependency injection has already taken place).
* Ensures the configurationService is injected, so that we can get the plugin
* and the forever embargo date threshold from the configuration.
* Called by "init-method" in Spring configuration.
*
* @throws Exception on generic exception
*/
public void init() throws Exception {
if (helper == null) {
helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class);
if (helper == null) {
throw new IllegalStateException("The AccessStatusHelper plugin was not defined in "
+ "DSpace configuration.");
}
// Defines the embargo forever date threshold for the access status.
// Look at EmbargoService.FOREVER for some improvements?
int year = configurationService.getIntProperty("access.status.embargo.forever.year");
int month = configurationService.getIntProperty("access.status.embargo.forever.month");
int day = configurationService.getIntProperty("access.status.embargo.forever.day");
forever_date = new LocalDate(year, month, day).toDate();
}
}
@Override
public String getAccessStatus(Context context, Item item) throws SQLException {
return helper.getAccessStatusFromItem(context, item, forever_date);
}
}

View File

@@ -0,0 +1,159 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
/**
* Default plugin implementation of the access status helper.
* The getAccessStatusFromItem method provides a simple logic to
* calculate the access status of an item based on the policies of
* the primary or the first bitstream in the original bundle.
* Users can override this method for enhanced functionality.
*/
public class DefaultAccessStatusHelper implements AccessStatusHelper {
public static final String EMBARGO = "embargo";
public static final String METADATA_ONLY = "metadata.only";
public static final String OPEN_ACCESS = "open.access";
public static final String RESTRICTED = "restricted";
public static final String UNKNOWN = "unknown";
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected ResourcePolicyService resourcePolicyService =
AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected AuthorizeService authorizeService =
AuthorizeServiceFactory.getInstance().getAuthorizeService();
public DefaultAccessStatusHelper() {
super();
}
/**
* Look at the item's policies to determine an access status value.
* It also considers a date threshold for embargoes and restrictions.
*
* If the item is null, simply returns the "unknown" value.
*
* @param context the DSpace context
* @param item the item to check for embargoes
* @param threshold the embargo threshold date
* @return an access status value
*/
@Override
public String getAccessStatusFromItem(Context context, Item item, Date threshold)
throws SQLException {
if (item == null) {
return UNKNOWN;
}
// Consider only the original bundles.
List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
// Check for primary bitstreams first.
Bitstream bitstream = bundles.stream()
.map(bundle -> bundle.getPrimaryBitstream())
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
if (bitstream == null) {
// If there is no primary bitstream,
// take the first bitstream in the bundles.
bitstream = bundles.stream()
.map(bundle -> bundle.getBitstreams())
.flatMap(List::stream)
.findFirst()
.orElse(null);
}
return calculateAccessStatusForDso(context, bitstream, threshold);
}
/**
* Look at the DSpace object's policies to determine an access status value.
*
* If the object is null, returns the "metadata.only" value.
* If any policy attached to the object is valid for the anonymous group,
* returns the "open.access" value.
* Otherwise, if the policy start date is before the embargo threshold date,
* returns the "embargo" value.
* Every other case returns the "restricted" value.
*
* @param context the DSpace context
* @param dso the DSpace object
* @param threshold the embargo threshold date
* @return an access status value
*/
private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
throws SQLException {
if (dso == null) {
return METADATA_ONLY;
}
// Only consider read policies.
List<ResourcePolicy> policies = authorizeService
.getPoliciesActionFilter(context, dso, Constants.READ);
int openAccessCount = 0;
int embargoCount = 0;
int restrictedCount = 0;
int unknownCount = 0;
// Looks at all read policies.
for (ResourcePolicy policy : policies) {
boolean isValid = resourcePolicyService.isDateValid(policy);
Group group = policy.getGroup();
// The group must not be null here. However,
// if it is, consider this as an unexpected case.
if (group == null) {
unknownCount++;
} else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
// Only calculate the status for the anonymous group.
if (isValid) {
// If the policy is valid, the anonymous group has access
// to the bitstream.
openAccessCount++;
} else {
Date startDate = policy.getStartDate();
if (startDate != null && !startDate.before(threshold)) {
// If the policy start date has a value and this value
// is equal to or later than the configured forever date,
// the access status is restricted.
restrictedCount++;
} else {
// If the current date is not between the policy start date
// and end date, the access status is embargo.
embargoCount++;
}
}
}
}
if (openAccessCount > 0) {
return OPEN_ACCESS;
}
if (embargoCount > 0 && restrictedCount == 0) {
return EMBARGO;
}
if (unknownCount > 0) {
return UNKNOWN;
}
return RESTRICTED;
}
}

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the access status package;
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public abstract class AccessStatusServiceFactory {
public abstract AccessStatusService getAccessStatusService();
public static AccessStatusServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class);
}
}
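As with the other DSpace service factories, callers obtain the service through the factory rather than instantiating the implementation. A minimal usage sketch (the wrapping class and method names are assumptions for illustration):

import java.sql.SQLException;

import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;

public class AccessStatusExample {

    /** Resolve the service and compute a status label such as "open.access" or "embargo". */
    public String statusOf(Context context, Item item) throws SQLException {
        AccessStatusService accessStatusService =
            AccessStatusServiceFactory.getInstance().getAccessStatusService();
        return accessStatusService.getAccessStatus(context, item);
    }
}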

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the access status package;
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory {
@Autowired(required = true)
private AccessStatusService accessStatusService;
@Override
public AccessStatusService getAccessStatusService() {
return accessStatusService;
}
}

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* <p>
* Access status allows users to view bitstream availability before
* browsing into the item itself.
* </p>
* <p>
* The access status is calculated through a pluggable class:
* {@link org.dspace.access.status.AccessStatusHelper}.
* The {@link org.dspace.access.status.AccessStatusServiceImpl}
* must be configured to specify this class, as well as a forever embargo date
* threshold year, month and day.
* </p>
* <p>
* See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation
* based on the primary or the first bitstream of the original bundle. You can
* supply your own class to implement more complex access statuses.
* </p>
* <p>
* For now, the access status is calculated when the item is shown in a list.
* </p>
*/
package org.dspace.access.status;

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.service;
import java.sql.SQLException;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Public interface to the access status subsystem.
* <p>
* Configuration properties: (with examples)
* {@code
* # values for the forever embargo date threshold
* # This threshold date is used in the default access status helper to determine if an item is
* # restricted or embargoed based on the start date of the primary (or first) file policies.
* # In this case, if the policy start date is earlier than the threshold date, the status will
* # be embargo; otherwise it will be restricted.
* # You might want to change this threshold based on your needs. For example, some databases
* # don't accept a date later than 31 December 9999.
* access.status.embargo.forever.year = 10000
* access.status.embargo.forever.month = 1
* access.status.embargo.forever.day = 1
* # implementation of access status helper plugin - replace with local implementation if applicable
* # This default access status helper provides an item status based on the policies of the primary
* # bitstream (or first bitstream in the original bundles if no primary file is specified).
* plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
* }
*/
public interface AccessStatusService {
/**
* Calculate the access status for an Item while considering the forever embargo date threshold.
*
* @param context the DSpace context
* @param item the item
* @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getAccessStatus(Context context, Item item) throws SQLException;
}

View File

@@ -0,0 +1,32 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.exception;
/**
* This class provides an exception to be used when trying to save a resource
* that already exists.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ResourceAlreadyExistsException extends RuntimeException {
private static final long serialVersionUID = 1L;
/**
* Create a ResourceAlreadyExistsException with an error message.
*
* @param message the error message
*/
public ResourceAlreadyExistsException(String message) {
super(message);
}
}

View File

@@ -0,0 +1,83 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static org.dspace.core.Constants.READ;
import static org.dspace.eperson.Group.ANONYMOUS;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Stream;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.util.UUIDUtils;
import org.springframework.util.Assert;
/**
* Object representing a Researcher Profile.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ResearcherProfile {
private final Item item;
private final MetadataValue dspaceObjectOwner;
/**
* Create a new ResearcherProfile object from the given item.
*
* @param item the profile item
* @throws IllegalArgumentException if the given item does not have a dspace.object.owner
* metadata value with a valid authority
*/
public ResearcherProfile(Item item) {
Assert.notNull(item, "A researcher profile requires an item");
this.item = item;
this.dspaceObjectOwner = getDspaceObjectOwnerMetadata(item);
}
public UUID getId() {
return UUIDUtils.fromString(dspaceObjectOwner.getAuthority());
}
/**
* A profile is considered visible if it is accessible to anonymous users. This method
* returns true if the given item has a READ policy for the ANONYMOUS group,
* false otherwise.
*/
public boolean isVisible() {
return item.getResourcePolicies().stream()
.filter(policy -> policy.getGroup() != null)
.anyMatch(policy -> READ == policy.getAction() && ANONYMOUS.equals(policy.getGroup().getName()));
}
public Item getItem() {
return item;
}
private MetadataValue getDspaceObjectOwnerMetadata(Item item) {
return getMetadataValue(item, "dspace.object.owner")
.filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null)
.orElseThrow(
() -> new IllegalArgumentException("A profile item must have a valid dspace.object.owner metadata")
);
}
private Optional<MetadataValue> getMetadataValue(Item item, String metadataField) {
return getMetadataValues(item, metadataField).findFirst();
}
private Stream<MetadataValue> getMetadataValues(Item item, String metadataField) {
return item.getMetadata().stream()
.filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.')));
}
}

View File

@@ -0,0 +1,367 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static java.util.Optional.ofNullable;
import static org.dspace.content.authority.Choices.CF_ACCEPTED;
import static org.dspace.core.Constants.READ;
import static org.dspace.core.Constants.WRITE;
import static org.dspace.eperson.Group.ANONYMOUS;
import java.io.IOException;
import java.net.URI;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.exception.ResourceAlreadyExistsException;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.ConfigurationService;
import org.dspace.util.UUIDUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.Assert;
/**
* Implementation of {@link ResearcherProfileService}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class ResearcherProfileServiceImpl implements ResearcherProfileService {
private static Logger log = LoggerFactory.getLogger(ResearcherProfileServiceImpl.class);
@Autowired
private ItemService itemService;
@Autowired
private WorkspaceItemService workspaceItemService;
@Autowired
private InstallItemService installItemService;
@Autowired
private ConfigurationService configurationService;
@Autowired
private CollectionService collectionService;
@Autowired
private SearchService searchService;
@Autowired
private GroupService groupService;
@Autowired
private AuthorizeService authorizeService;
@Override
public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException {
Assert.notNull(id, "An id must be provided to find a researcher profile");
Item profileItem = findResearcherProfileItemById(context, id);
if (profileItem == null) {
return null;
}
return new ResearcherProfile(profileItem);
}
@Override
public ResearcherProfile createAndReturn(Context context, EPerson ePerson)
throws AuthorizeException, SQLException, SearchServiceException {
Item profileItem = findResearcherProfileItemById(context, ePerson.getID());
if (profileItem != null) {
throw new ResourceAlreadyExistsException("A profile is already linked to the provided User");
}
Collection collection = findProfileCollection(context)
.orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles"));
context.turnOffAuthorisationSystem();
try {
Item item = createProfileItem(context, ePerson, collection);
return new ResearcherProfile(item);
} finally {
context.restoreAuthSystemState();
}
}
@Override
public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException {
Assert.notNull(id, "An id must be provided to find a researcher profile");
Item profileItem = findResearcherProfileItemById(context, id);
if (profileItem == null) {
return;
}
if (isHardDeleteEnabled()) {
deleteItem(context, profileItem);
} else {
removeOwnerMetadata(context, profileItem);
}
}
@Override
public void changeVisibility(Context context, ResearcherProfile profile, boolean visible)
throws AuthorizeException, SQLException {
if (profile.isVisible() == visible) {
return;
}
Item item = profile.getItem();
Group anonymous = groupService.findByName(context, ANONYMOUS);
if (visible) {
authorizeService.addPolicy(context, item, READ, anonymous);
} else {
authorizeService.removeGroupPolicies(context, item, anonymous);
}
}
@Override
public ResearcherProfile claim(Context context, EPerson ePerson, URI uri)
throws SQLException, AuthorizeException, SearchServiceException {
Item profileItem = findResearcherProfileItemById(context, ePerson.getID());
if (profileItem != null) {
throw new ResourceAlreadyExistsException("A profile is already linked to the provided User");
}
Item item = findItemByURI(context, uri)
.orElseThrow(() -> new IllegalArgumentException("No item found by URI " + uri));
if (!item.isArchived() || item.isWithdrawn()) {
throw new IllegalArgumentException(
"Only archived items can be claimed to create a researcher profile. Item ID: " + item.getID());
}
if (!hasProfileType(item)) {
throw new IllegalArgumentException("The provided item has not a profile type. Item ID: " + item.getID());
}
if (haveDifferentEmail(item, ePerson)) {
throw new IllegalArgumentException("The provided item is not claimable because it has a different email "
+ "than the given user's email. Item ID: " + item.getID());
}
String existingOwner = itemService.getMetadataFirstValue(item, "dspace", "object", "owner", Item.ANY);
if (StringUtils.isNotBlank(existingOwner)) {
throw new IllegalArgumentException("Item with provided uri has already an owner - ID: " + existingOwner);
}
context.turnOffAuthorisationSystem();
itemService.addMetadata(context, item, "dspace", "object", "owner", null,
ePerson.getName(), ePerson.getID().toString(), CF_ACCEPTED);
context.restoreAuthSystemState();
return new ResearcherProfile(item);
}
@Override
public boolean hasProfileType(Item item) {
String profileType = getProfileType();
if (StringUtils.isBlank(profileType)) {
return false;
}
return profileType.equals(itemService.getEntityTypeLabel(item));
}
@Override
public String getProfileType() {
return configurationService.getProperty("researcher-profile.entity-type", "Person");
}
private Optional<Item> findItemByURI(final Context context, final URI uri) throws SQLException {
String path = uri.getPath();
UUID uuid = UUIDUtils.fromString(path.substring(path.lastIndexOf("/") + 1));
return ofNullable(itemService.find(context, uuid));
}
/**
* Search for a profile item owned by an eperson with the given id.
*/
private Item findResearcherProfileItemById(Context context, UUID id) throws SQLException, AuthorizeException {
String profileType = getProfileType();
Iterator<Item> items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString());
while (items.hasNext()) {
Item item = items.next();
String entityType = itemService.getEntityTypeLabel(item);
if (profileType.equals(entityType)) {
return item;
}
}
return null;
}
/**
* Returns the profile collection from configuration, or else searches for a
* collection with the researcher profile entity type.
*/
private Optional<Collection> findProfileCollection(Context context) throws SQLException, SearchServiceException {
return findConfiguredProfileCollection(context)
.or(() -> findFirstCollectionByProfileEntityType(context));
}
/**
* Create a new profile item for the given ePerson in the provided collection.
*/
private Item createProfileItem(Context context, EPerson ePerson, Collection collection)
throws AuthorizeException, SQLException {
String id = ePerson.getID().toString();
String fullName = ePerson.getFullName();
WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true);
Item item = workspaceItem.getItem();
itemService.addMetadata(context, item, "dc", "title", null, null, fullName);
itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail());
itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED);
item = installItemService.installItem(context, workspaceItem);
if (isNewProfileNotVisibleByDefault()) {
Group anonymous = groupService.findByName(context, ANONYMOUS);
authorizeService.removeGroupPolicies(context, item, anonymous);
}
authorizeService.addPolicy(context, item, READ, ePerson);
authorizeService.addPolicy(context, item, WRITE, ePerson);
return reloadItem(context, item);
}
private Optional<Collection> findConfiguredProfileCollection(Context context) throws SQLException {
UUID uuid = UUIDUtils.fromString(configurationService.getProperty("researcher-profile.collection.uuid"));
if (uuid == null) {
return Optional.empty();
}
Collection collection = collectionService.find(context, uuid);
if (collection == null) {
return Optional.empty();
}
if (isNotProfileCollection(collection)) {
log.warn("The configured researcher-profile.collection.uuid "
+ "has an invalid entity type, expected " + getProfileType());
return Optional.empty();
}
return of(collection);
}
@SuppressWarnings("rawtypes")
private Optional<Collection> findFirstCollectionByProfileEntityType(Context context) {
String profileType = getProfileType();
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE);
discoverQuery.addFilterQueries("dspace.entity.type:" + profileType);
DiscoverResult discoverResult = search(context, discoverQuery);
List<IndexableObject> indexableObjects = discoverResult.getIndexableObjects();
if (CollectionUtils.isEmpty(indexableObjects)) {
return empty();
}
return ofNullable((Collection) indexableObjects.get(0).getIndexedObject());
}
private boolean isHardDeleteEnabled() {
return configurationService.getBooleanProperty("researcher-profile.hard-delete.enabled");
}
private boolean isNewProfileNotVisibleByDefault() {
return !configurationService.getBooleanProperty("researcher-profile.set-new-profile-visible");
}
private boolean isNotProfileCollection(Collection collection) {
String entityType = collectionService.getMetadataFirstValue(collection, "dspace", "entity", "type", Item.ANY);
return entityType == null || !entityType.equals(getProfileType());
}
private boolean haveDifferentEmail(Item item, EPerson currentUser) {
return itemService.getMetadataByMetadataString(item, "person.email").stream()
.map(MetadataValue::getValue)
.filter(StringUtils::isNotBlank)
.noneMatch(email -> email.equalsIgnoreCase(currentUser.getEmail()));
}
private void removeOwnerMetadata(Context context, Item profileItem) throws SQLException {
List<MetadataValue> metadata = itemService.getMetadata(profileItem, "dspace", "object", "owner", Item.ANY);
itemService.removeMetadataValues(context, profileItem, metadata);
}
private Item reloadItem(Context context, Item item) throws SQLException {
context.uncacheEntity(item);
return context.reloadEntity(item);
}
private void deleteItem(Context context, Item profileItem) throws SQLException, AuthorizeException {
try {
context.turnOffAuthorisationSystem();
itemService.delete(context, profileItem);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
context.restoreAuthSystemState();
}
}
private DiscoverResult search(Context context, DiscoverQuery discoverQuery) {
try {
return searchService.search(context, discoverQuery);
} catch (SearchServiceException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.profile.service;
import java.net.URI;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.profile.ResearcherProfile;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;
import org.dspace.eperson.EPerson;
/**
* Service interface class for the {@link ResearcherProfile} object. The
* implementation of this class is responsible for all business logic calls for
* the {@link ResearcherProfile} object.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public interface ResearcherProfileService {
/**
* Find the ResearcherProfile by UUID.
*
* @param context the relevant DSpace Context.
* @param id the ResearcherProfile id
* @return the found ResearcherProfile
* @throws SQLException
* @throws AuthorizeException
*/
public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException;
/**
* Create a new researcher profile for the given ePerson.
*
* @param context the relevant DSpace Context.
* @param ePerson the ePerson
* @return the created profile
* @throws SQLException
* @throws AuthorizeException
* @throws SearchServiceException
*/
public ResearcherProfile createAndReturn(Context context, EPerson ePerson)
throws AuthorizeException, SQLException, SearchServiceException;
/**
* Delete the profile with the given id. Based on the
* researcher-profile.hard-delete.enabled configuration, this method deletes the
* related item or removes the association between the researcher profile and
* eperson related to the input uuid.
*
* @param context the relevant DSpace Context.
* @param id the researcher profile id
* @throws AuthorizeException
* @throws SQLException
*/
public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException;
/**
* Changes the visibility of the given profile using the given new visible
* value. The visibility controls whether the Profile is Anonymous READ or not.
*
* @param context the relevant DSpace Context.
* @param profile the researcher profile to update
* @param visible the visible value to set. If true the profile will
* be visible to all users.
* @throws SQLException
* @throws AuthorizeException
*/
public void changeVisibility(Context context, ResearcherProfile profile, boolean visible)
throws AuthorizeException, SQLException;
/**
* Claims and links an eperson to an existing DSpaceObject.
* @param context the relevant DSpace Context.
* @param ePerson the ePerson
* @param uri uri of existing Item to be linked to the
* eperson
* @return the created profile
* @throws IllegalArgumentException if the given uri is not related to an
* archived item or if the item cannot be
* claimed
*/
ResearcherProfile claim(Context context, EPerson ePerson, URI uri)
throws SQLException, AuthorizeException, SearchServiceException;
/**
* Check if the given item has an entity type compatible with that of the
* researcher profile. If the given item does not have an entity type, the check
* returns false.
*
* @param item the item to check
* @return the check result
*/
boolean hasProfileType(Item item);
/**
* Returns the profile entity type, if any.
*
* @return the profile type
*/
String getProfileType();
}
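To show how these methods are meant to compose, here is a hedged sketch of a caller that creates a profile for a user and makes it publicly visible; how the service bean is injected is an assumption, as this commit wires the implementation through Spring.

import java.sql.SQLException;

import org.dspace.app.profile.ResearcherProfile;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.discovery.SearchServiceException;
import org.dspace.eperson.EPerson;

public class ProfileExample {

    private final ResearcherProfileService researcherProfileService;

    public ProfileExample(ResearcherProfileService researcherProfileService) {
        this.researcherProfileService = researcherProfileService;
    }

    /** Create a profile for the given user and grant anonymous READ on it. */
    public ResearcherProfile createVisibleProfile(Context context, EPerson ePerson)
        throws SQLException, AuthorizeException, SearchServiceException {
        ResearcherProfile profile = researcherProfileService.createAndReturn(context, ePerson);
        researcherProfileService.changeVisibility(context, profile, true);
        return profile;
    }
}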

View File

@@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse;
import org.dspace.app.sherpa.v2.SHERPAUtils;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
/**
* SHERPAService is responsible for making the HTTP call to the SHERPA v2 API
@@ -43,6 +44,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* @author Kim Shepherd
*/
public class SHERPAService {
private CloseableHttpClient client = null;
private int maxNumberOfTries;
@@ -91,6 +93,7 @@ public class SHERPAService {
* @param query ISSN string to pass in an "issn equals" API query
* @return SHERPAResponse containing an error or journal policies
*/
@Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN")
public SHERPAResponse searchByJournalISSN(String query) {
return performRequest("publication", "issn", "equals", query, 0, 1);
}
@@ -413,4 +416,5 @@ public class SHERPAService {
public void setTimeout(int timeout) {
this.timeout = timeout;
}
}
}
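The new @Cacheable annotation only has an effect when a Spring CacheManager exposing a cache named sherpa.searchByJournalISSN is active; in this commit that manager is EhCache-backed and configured via XML. Purely to illustrate the mechanism (this is not the actual DSpace wiring), a minimal in-memory setup could look like:

import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Minimal sketch: with this configuration active, repeated calls to
 * searchByJournalISSN with the same ISSN return the cached SHERPAResponse
 * instead of re-querying the SHERPA API.
 */
@Configuration
@EnableCaching
public class SherpaCacheConfigSketch {

    @Bean
    public CacheManager cacheManager() {
        return new ConcurrentMapCacheManager("sherpa.searchByJournalISSN");
    }
}

This is also why SHERPAResponse and its nested model classes gain Serializable in this commit: cached values must be serializable for disk-backed EhCache stores.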

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import java.util.Objects;
import java.util.Set;
import org.dspace.app.sherpa.submit.SHERPASubmitService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.springframework.cache.CacheManager;
/**
* This service is responsible for managing the SHERPAService cache.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class SherpaCacheEvictService {
// The cache that is managed by this service.
static final String CACHE_NAME = "sherpa.searchByJournalISSN";
private CacheManager cacheManager;
private SHERPASubmitService sherpaSubmitService;
/**
* Immediately remove from the cache all responses related to a specific item,
* extracting the ISSNs from the item.
*
* @param context The DSpace context
* @param item an Item
*/
public void evictCacheValues(Context context, Item item) {
Set<String> ISSNs = sherpaSubmitService.getISSNs(context, item);
for (String issn : ISSNs) {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(issn);
}
}
/**
* Immediately invalidate the entire SHERPA cache.
*/
public void evictAllCacheValues() {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate();
}
/**
* Set the reference to the cacheManager
*
* @param cacheManager
*/
public void setCacheManager(CacheManager cacheManager) {
this.cacheManager = cacheManager;
}
/**
* Set the reference to the SherpaSubmitService
*
* @param sherpaSubmitService
*/
public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) {
this.sherpaSubmitService = sherpaSubmitService;
}
}
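The service relies on setter injection, so it is expected to be defined as a Spring bean with its CacheManager and SHERPASubmitService set. A brief usage sketch, with the surrounding class assumed for illustration: after an item's ISSN metadata changes, the stale cached responses can be evicted so the next lookup hits the API again.

import org.dspace.app.sherpa.cache.SherpaCacheEvictService;
import org.dspace.content.Item;
import org.dspace.core.Context;

public class SherpaEvictionExample {

    private final SherpaCacheEvictService sherpaCacheEvictService;

    public SherpaEvictionExample(SherpaCacheEvictService sherpaCacheEvictService) {
        this.sherpaCacheEvictService = sherpaCacheEvictService;
    }

    /** Drop the cached SHERPA responses for this item's ISSNs after an edit. */
    public void onItemMetadataChanged(Context context, Item item) {
        sherpaCacheEvictService.evictCacheValues(context, item);
    }
}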

View File

@@ -0,0 +1,34 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.cache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.ehcache.event.CacheEvent;
import org.ehcache.event.CacheEventListener;
/**
* This is an EHCache listener responsible for logging SHERPA cache events. It is
* bound to the SHERPA cache via the dspace/config/ehcache.xml file. We need a
* dedicated Logger for each cache, as the CacheEvent doesn't include details
* about where the event occurred.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*
*/
public class SherpaCacheLogger implements CacheEventListener<Object, Object> {
private static final Logger log = LogManager.getLogger(SherpaCacheLogger.class);
@Override
public void onEvent(CacheEvent<?, ?> cacheEvent) {
log.debug("Sherpa Cache Event Type: {} | Key: {} ",
cacheEvent.getType(), cacheEvent.getKey());
}
}

View File

@@ -9,7 +9,6 @@ package org.dspace.app.sherpa.submit;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -63,19 +62,19 @@ public class SHERPASubmitService {
* issnItemExtractor(s) in the SHERPA spring configuration.
* The ISSNs are not validated with a regular expression or other rules - any values
* extracted will be included in API queries.
* Returns the first non-empty response from SHERPA.
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
* @param context DSpace context
* @param item DSpace item containing ISSNs to be checked
* @return SHERPA v2 API response (policy data)
*/
public List<SHERPAResponse> searchRelatedJournals(Context context, Item item) {
public SHERPAResponse searchRelatedJournals(Context context, Item item) {
Set<String> issns = getISSNs(context, item);
if (issns == null || issns.size() == 0) {
return null;
} else {
// SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead
Iterator<String> issnIterator = issns.iterator();
List<SHERPAResponse> responses = new LinkedList<>();
while (issnIterator.hasNext()) {
String issn = issnIterator.next();
SHERPAResponse response = sherpaService.searchByJournalISSN(issn);
@@ -83,14 +82,13 @@ public class SHERPASubmitService {
// Continue with loop
log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn
+ ": " + response.getMessage());
return response;
} else if (!response.getJournals().isEmpty()) {
// return this response, if it is not empty
return response;
}
// Store this response, even if it has an error (useful for UI reporting)
responses.add(response);
}
if (responses.isEmpty()) {
return new SHERPAResponse();
responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed"));
}
return responses;
}
}

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Model class for the Embargo of SHERPAv2 API (JSON)
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class SHERPAEmbargo implements Serializable {
private static final long serialVersionUID = 6140668058547523656L;
private int amount;
private String units;
public SHERPAEmbargo(int amount, String units) {
this.amount = amount;
this.units = units;
}
public int getAmount() {
return amount;
}
public void setAmount(int amount) {
this.amount = amount;
}
public String getUnits() {
return units;
}
public void setUnits(String units) {
this.units = units;
}
}

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
/**
@@ -21,7 +22,7 @@ import java.util.List;
*
* @author Kim Shepherd
*/
public class SHERPAJournal {
public class SHERPAJournal implements Serializable {
private List<String> titles;
private String url;

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
/**
@@ -28,7 +29,9 @@ import java.util.List;
*
* @see SHERPAPublisherPolicy
*/
public class SHERPAPermittedVersion {
public class SHERPAPermittedVersion implements Serializable {
private static final long serialVersionUID = 4992181606327727442L;
// Version (submitted, accepted, published)
private String articleVersion;
@@ -47,11 +50,6 @@ public class SHERPAPermittedVersion {
// Embargo
private SHERPAEmbargo embargo;
protected static class SHERPAEmbargo {
String units;
int amount;
}
public String getArticleVersion() {
return articleVersion;
}

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses.
*
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
* @see SHERPAJournal
* @see SHERPAPublisherResponse
*/
public class SHERPAPublisher {
public class SHERPAPublisher implements Serializable {
private String name = null;
private String relationshipType;
private String country;

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
@@ -22,7 +23,7 @@ import java.util.Map;
* @see SHERPAJournal
* @see SHERPAPermittedVersion
*/
public class SHERPAPublisherPolicy {
public class SHERPAPublisherPolicy implements Serializable {
private int id;
private boolean openAccessPermitted;

View File

@@ -10,12 +10,15 @@ package org.dspace.app.sherpa.v2;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.json.JSONArray;
@@ -33,7 +36,10 @@ import org.json.JSONTokener;
* @author Kim Shepherd
*
*/
public class SHERPAResponse {
public class SHERPAResponse implements Serializable {
private static final long serialVersionUID = 2732963970169240597L;
// Is this response to be treated as an error?
private boolean error;
@@ -52,6 +58,9 @@ public class SHERPAResponse {
// SHERPA URI (the human page version of this API response)
private String uri;
@JsonIgnore
private Date retrievalTime = new Date();
// Format enum - currently only JSON is supported
public enum SHERPAFormat {
JSON, XML
@@ -71,6 +80,11 @@ public class SHERPAResponse {
}
}
/**
* Create an empty SHERPAResponse representation
*/
public SHERPAResponse() {}
/**
* Parse the SHERPA v2 API JSON and construct Romeo policy data for display
* This method does not return a value, but rather populates the metadata and journals objects
@@ -479,6 +493,12 @@ public class SHERPAResponse {
}
permittedVersion.setLicenses(sherpaLicenses);
if (permitted.has("embargo")) {
JSONObject embargo = permitted.getJSONObject("embargo");
SHERPAEmbargo sherpaEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units"));
permittedVersion.setEmbargo(sherpaEmbargo);
}
return permittedVersion;
}
@@ -542,4 +562,8 @@ public class SHERPAResponse {
public SHERPASystemMetadata getMetadata() {
return metadata;
}
public Date getRetrievalTime() {
return retrievalTime;
}
}

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.app.sherpa.v2;
import java.io.Serializable;
/**
* Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses.
*
@@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2;
*
* @author Kim Shepherd
*/
public class SHERPASystemMetadata {
public class SHERPASystemMetadata implements Serializable {
private int id;
private String uri;

View File

@@ -561,6 +561,15 @@ public class DCInput {
return true;
}
/**
* Get the type bind list for use in determining whether
* to display this field in angular dynamic form building
* @return list of bound types
*/
public List<String> getTypeBindList() {
return typeBind;
}
/**
* Verify whether the current field contains an entity relationship
* This also implies a relationship type is defined for this field

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -176,4 +177,50 @@ public class DCInputSet {
return true;
}
/**
* Iterate DC input rows and populate a list of all allowed field names in this submission configuration.
* This is important because an input can be configured repeatedly in a form (for example it could be required
* for type Book, and allowed but not required for type Article).
* If the field is allowed for this document type it'll never be stripped from metadata on validation.
*
* This can be more efficient than isFieldPresent, since it avoids looping over the input set with each check.
*
* @param documentTypeValue Document type, e.g. Article, Book
* @return ArrayList of field names to use in validation
*/
public List<String> populateAllowedFieldNames(String documentTypeValue) {
List<String> allowedFieldNames = new ArrayList<>();
// Before iterating each input for validation, run through all inputs + fields and populate a lookup
// map with inputs for this type. Because an input can be configured repeatedly in a form (for example
// it could be required for type Book, and allowed but not required for type Article), allowed=true will
// always take precedence
for (DCInput[] row : inputs) {
for (DCInput input : row) {
if (input.isQualdropValue()) {
List<Object> inputPairs = input.getPairs();
//starting from the second element of the list and skipping one every time because the display
// values are also in the list and before the stored values.
for (int i = 1; i < inputPairs.size(); i += 2) {
String fullFieldname = input.getFieldName() + "." + inputPairs.get(i);
if (input.isAllowedFor(documentTypeValue)) {
if (!allowedFieldNames.contains(fullFieldname)) {
allowedFieldNames.add(fullFieldname);
}
// For the purposes of qualdrop, we have to add the field name without the qualifier
// too, or a required qualdrop will get confused and incorrectly reject a value
if (!allowedFieldNames.contains(input.getFieldName())) {
allowedFieldNames.add(input.getFieldName());
}
}
}
} else {
if (input.isAllowedFor(documentTypeValue) && !allowedFieldNames.contains(input.getFieldName())) {
allowedFieldNames.add(input.getFieldName());
}
}
}
}
return allowedFieldNames;
}
}
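A sketch of the validation pattern this method enables, assuming a hypothetical caller that already holds the DCInputSet, the document type, and the submitted field names: the allowed list is computed once per submission rather than calling isFieldPresent per field.

import java.util.List;

import org.dspace.app.util.DCInputSet;

public class AllowedFieldsExample {

    /**
     * Sketch: compute the allowed field names once, then test each
     * submitted field against the list during validation.
     */
    public void validate(DCInputSet inputSet, String documentType, List<String> submittedFields) {
        List<String> allowedFieldNames = inputSet.populateAllowedFieldNames(documentType);
        for (String fieldName : submittedFields) {
            if (!allowedFieldNames.contains(fieldName)) {
                // A real validator would strip or reject the value here.
                System.out.println("Not allowed for type " + documentType + ": " + fieldName);
            }
        }
    }
}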

View File

@@ -193,13 +193,11 @@ public class SyndicationFeed {
String defaultTitle = null;
boolean podcastFeed = false;
this.request = request;
// dso is null for the whole site, or a search without scope
if (dso == null) {
defaultTitle = configurationService.getProperty("dspace.name");
feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION));
objectURL = resolveURL(request, null);
logoURL = configurationService.getProperty("webui.feed.logo.url");
} else {
Bitstream logo = null;
if (dso instanceof IndexableCollection) {
@@ -329,7 +327,8 @@ public class SyndicationFeed {
dcDescriptionField != null) {
DCModule dc = new DCModuleImpl();
if (dcCreatorField != null) {
List<MetadataValue> dcAuthors = itemService.getMetadataByMetadataString(item, dcCreatorField);
List<MetadataValue> dcAuthors = itemService
.getMetadataByMetadataString(item, dcCreatorField);
if (dcAuthors.size() > 0) {
List<String> creators = new ArrayList<>();
for (MetadataValue author : dcAuthors) {
@@ -345,7 +344,8 @@ public class SyndicationFeed {
} }
} }
if (dcDescriptionField != null) { if (dcDescriptionField != null) {
List<MetadataValue> v = itemService.getMetadataByMetadataString(item, dcDescriptionField); List<MetadataValue> v = itemService
.getMetadataByMetadataString(item, dcDescriptionField);
if (v.size() > 0) { if (v.size() > 0) {
StringBuilder descs = new StringBuilder(); StringBuilder descs = new StringBuilder();
for (MetadataValue d : v) { for (MetadataValue d : v) {
@@ -376,6 +376,7 @@ public class SyndicationFeed {
enc.setLength(bit.getSizeBytes()); enc.setLength(bit.getSizeBytes());
enc.setUrl(urlOfBitstream(request, bit)); enc.setUrl(urlOfBitstream(request, bit));
enclosures.add(enc); enclosures.add(enc);
} }
} }
} }

View File

@@ -50,7 +50,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
     */
    protected SolrClient solr = null;

-   protected SolrClient getSolr()
+   public SolrClient getSolr()
        throws MalformedURLException, SolrServerException, IOException {
        if (solr == null) {
@@ -67,7 +67,11 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
            SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
-           solrServer.query(solrQuery);
+           try {
+               solrServer.query(solrQuery);
+           } catch (Exception ex) {
+               log.error("An error occurred while querying the authority solr core", ex);
+           }
            solr = solrServer;
        }

View File

@@ -7,6 +7,9 @@
 */
package org.dspace.authorize;

+import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts;
+import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts;
+
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -900,6 +903,16 @@ public class AuthorizeServiceImpl implements AuthorizeService {
        return discoverResult.getTotalSearchResults();
    }

+   @Override
+   public boolean isAccountManager(Context context) {
+       try {
+           return (canCommunityAdminManageAccounts() && isCommunityAdmin(context)
+               || canCollectionAdminManageAccounts() && isCollectionAdmin(context));
+       } catch (SQLException e) {
+           throw new RuntimeException(e);
+       }
+   }
+
    private boolean performCheck(Context context, String query) throws SQLException {
        if (context.getCurrentUser() == null) {
            return false;

View File

@@ -592,4 +592,12 @@ public interface AuthorizeService {
     */
    long countAdminAuthorizedCollection(Context context, String query)
        throws SearchServiceException, SQLException;

+   /**
+    * Returns true if the current user can manage accounts.
+    *
+    * @param context context with the current user
+    * @return true if the current user can manage accounts
+    */
+   boolean isAccountManager(Context context);
}
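For illustration, a hedged sketch of a call site that combines the new check with isAdmin, as the EPersonAuthority plugin below does; the guarded action is hypothetical:

    AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
    if (!authorizeService.isAdmin(context) && !authorizeService.isAccountManager(context)) {
        throw new AuthorizeException("Only administrators or community/collection account managers may do this");
    }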

View File

@@ -1139,6 +1139,50 @@ prevent the generation of resource policy entry values with null dspace_object a
        return !(hasCustomPolicy && isAnonimousGroup && datesAreNull);
    }

+   /**
+    * Returns an iterator of Items possessing the passed metadata field, or only
+    * those matching the passed value, if value is not Item.ANY
+    *
+    * @param context   DSpace context object
+    * @param schema    metadata field schema
+    * @param element   metadata field element
+    * @param qualifier metadata field qualifier
+    * @param value     field value or Item.ANY to match any value
+    * @return an iterator over the items matching the given metadata field and value
+    * @throws SQLException       if database error
+    *                            An exception that provides information on a database access error or other errors.
+    * @throws AuthorizeException if authorization error
+    *                            Exception indicating the current user of the context does not have permission
+    *                            to perform a particular action.
+    */
+   @Override
+   public Iterator<Item> findArchivedByMetadataField(Context context,
+                                                     String schema, String element, String qualifier, String value)
+       throws SQLException, AuthorizeException {
+       MetadataSchema mds = metadataSchemaService.find(context, schema);
+       if (mds == null) {
+           throw new IllegalArgumentException("No such metadata schema: " + schema);
+       }
+       MetadataField mdf = metadataFieldService.findByElement(context, mds, element, qualifier);
+       if (mdf == null) {
+           throw new IllegalArgumentException(
+               "No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier);
+       }
+       if (Item.ANY.equals(value)) {
+           return itemDAO.findByMetadataField(context, mdf, null, true);
+       } else {
+           return itemDAO.findByMetadataField(context, mdf, value, true);
+       }
+   }
+
+   @Override
+   public Iterator<Item> findArchivedByMetadataField(Context context, String metadataField, String value)
+       throws SQLException, AuthorizeException {
+       String[] mdValueByField = getMDValueByField(metadataField);
+       return findArchivedByMetadataField(context, mdValueByField[0], mdValueByField[1], mdValueByField[2], value);
+   }
+
    /**
     * Returns an iterator of Items possessing the passed metadata field, or only
     * those matching the passed value, if value is not Item.ANY
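A short usage sketch of the new lookup; the metadata field and loop body are illustrative only:

    // Iterate only in-archive items carrying dc.identifier.isbn, with any value.
    Iterator<Item> items = itemService.findArchivedByMetadataField(
            context, "dc", "identifier", "isbn", Item.ANY);
    while (items.hasNext()) {
        Item item = items.next();
        // ... process the archived item ...
    }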

View File

@@ -0,0 +1,127 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.authority;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.util.UUIDUtils;
import org.dspace.web.ContextUtil;
/**
* Implementation of {@link ChoiceAuthority} based on EPerson. Allows you to set
* the id of an eperson as authority.
*
* @author Mykhaylo Boychuk (4science.it)
*/
public class EPersonAuthority implements ChoiceAuthority {
private static final Logger log = LogManager.getLogger(EPersonAuthority.class);
/**
* the name assigned to the specific instance by the PluginService, @see
* {@link NameAwarePlugin}
**/
private String authorityName;
private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
@Override
public Choices getBestMatch(String text, String locale) {
return getMatches(text, 0, 2, locale);
}
@Override
public Choices getMatches(String text, int start, int limit, String locale) {
if (limit <= 0) {
limit = 20;
}
Context context = getContext();
List<EPerson> ePersons = searchEPersons(context, text, start, limit);
List<Choice> choiceList = new ArrayList<Choice>();
for (EPerson eperson : ePersons) {
choiceList.add(new Choice(eperson.getID().toString(), eperson.getFullName(), eperson.getFullName()));
}
Choice[] results = new Choice[choiceList.size()];
results = choiceList.toArray(results);
return new Choices(results, start, ePersons.size(), Choices.CF_AMBIGUOUS, ePersons.size() > (start + limit), 0);
}
@Override
public String getLabel(String key, String locale) {
UUID uuid = UUIDUtils.fromString(key);
if (uuid == null) {
return null;
}
Context context = getContext();
try {
EPerson ePerson = ePersonService.find(context, uuid);
return ePerson != null ? ePerson.getFullName() : null;
} catch (SQLException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private List<EPerson> searchEPersons(Context context, String text, int start, int limit) {
if (!isCurrentUserAdminOrAccessGroupManager(context)) {
return Collections.emptyList();
}
try {
return ePersonService.search(context, text, start, limit);
} catch (SQLException e) {
log.error(e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
}
}
private Context getContext() {
Context context = ContextUtil.obtainCurrentRequestContext();
return context != null ? context : new Context();
}
private boolean isCurrentUserAdminOrAccessGroupManager(Context context) {
try {
return authorizeService.isAdmin(context) || authorizeService.isAccountManager(context);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public String getPluginInstanceName() {
return authorityName;
}
@Override
public void setPluginInstanceName(String name) {
this.authorityName = name;
}
}
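A hedged sketch of the plugin in use through the ChoiceAuthority interface; the search text and UUID are placeholders, and in practice the instance is created and named by the PluginService:

    ChoiceAuthority authority = new EPersonAuthority();
    Choices matches = authority.getMatches("jane", 0, 20, null);
    for (Choice choice : matches.values) {
        // choice.authority holds the eperson UUID, choice.value the full name
        System.out.println(choice.authority + " -> " + choice.value);
    }
    String label = authority.getLabel("2f1bb3f6-0000-0000-0000-000000000000", null); // null if unknown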

View File

@@ -594,6 +594,37 @@ public interface ItemService
     */
    public boolean canCreateNewVersion(Context context, Item item) throws SQLException;

+   /**
+    * Returns an iterator of in-archive items possessing the passed metadata field, or only
+    * those matching the passed value, if value is not Item.ANY
+    *
+    * @param context   DSpace context object
+    * @param schema    metadata field schema
+    * @param element   metadata field element
+    * @param qualifier metadata field qualifier
+    * @param value     field value or Item.ANY to match any value
+    * @return an iterator over the items matching the given metadata field and value
+    * @throws SQLException       if database error
+    * @throws AuthorizeException if authorization error
+    */
+   public Iterator<Item> findArchivedByMetadataField(Context context, String schema,
+                                                     String element, String qualifier,
+                                                     String value) throws SQLException, AuthorizeException;
+
+   /**
+    * Returns an iterator of in-archive items possessing the passed metadata field, or only
+    * those matching the passed value, if value is not Item.ANY
+    *
+    * @param context       DSpace context object
+    * @param metadataField metadata field in schema.element.qualifier notation
+    * @param value         field value or Item.ANY to match any value
+    * @return an iterator over the items matching the given metadata field and value
+    * @throws SQLException       if database error
+    * @throws AuthorizeException if authorization error
+    */
+   public Iterator<Item> findArchivedByMetadataField(Context context, String metadataField, String value)
+       throws SQLException, AuthorizeException;
+
    /**
     * Returns an iterator of Items possessing the passed metadata field, or only
     * those matching the passed value, if value is not Item.ANY
@@ -633,7 +664,7 @@ public interface ItemService
     */
    public Iterator<Item> findByAuthorityValue(Context context,
                                               String schema, String element, String qualifier, String value)
-       throws SQLException, AuthorizeException, IOException;
+       throws SQLException, AuthorizeException;

    public Iterator<Item> findByMetadataFieldAuthority(Context context, String mdString, String authority)

View File

@@ -10,6 +10,7 @@ package org.dspace.core;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Deque;
+import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
@@ -91,12 +92,12 @@ public class Context implements AutoCloseable {
    /**
     * Group IDs of special groups user is a member of
     */
-   private List<UUID> specialGroups;
+   private Set<UUID> specialGroups;

    /**
     * Temporary store for the specialGroups when the current user is temporarily switched
     */
-   private List<UUID> specialGroupsPreviousState;
+   private Set<UUID> specialGroupsPreviousState;

    /**
     * The currently used authentication method
@@ -183,7 +184,7 @@ public class Context implements AutoCloseable {
        extraLogInfo = "";
        ignoreAuth = false;

-       specialGroups = new ArrayList<>();
+       specialGroups = new HashSet<>();

        authStateChangeHistory = new ConcurrentLinkedDeque<>();
        authStateClassCallHistory = new ConcurrentLinkedDeque<>();
@@ -656,6 +657,15 @@ public class Context implements AutoCloseable {
        return myGroups;
    }

+   /**
+    * Get the set of all special group UUIDs that the current user is a member of.
+    *
+    * @return set of special group UUIDs
+    */
+   public Set<UUID> getSpecialGroupUuids() {
+       return CollectionUtils.isEmpty(specialGroups) ? Set.of() : specialGroups;
+   }
+
    /**
     * Temporarily change the user bound to the context, emptying the special groups that
     * are retained to allow a subsequent restore
@@ -673,7 +683,7 @@ public class Context implements AutoCloseable {
        currentUserPreviousState = currentUser;
        specialGroupsPreviousState = specialGroups;
-       specialGroups = new ArrayList<>();
+       specialGroups = new HashSet<>();
        currentUser = newUser;
    }
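Switching the backing collection from a List to a HashSet de-duplicates repeated special-group registrations and makes membership tests constant-time. A small sketch of the new accessor (the group variable is hypothetical):

    Set<UUID> specials = context.getSpecialGroupUuids();
    if (specials.contains(group.getID())) {
        // the current user was placed in this group for the session, e.g. by an authentication plugin
    }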

View File

@@ -1174,7 +1174,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
                //DO NOT ESCAPE RANGE QUERIES !
                if (!value.matches("\\[.*TO.*\\]")) {
                    value = ClientUtils.escapeQueryChars(value);
-                   filterQuery.append("(").append(value).append(")");
+                   filterQuery.append("\"").append(value).append("\"");
                } else {
                    filterQuery.append(value);
                }
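The switch from parentheses to double quotes turns the filter into an exact phrase match: inside parentheses the escaped value is still tokenized by the field's analyzer, so a multi-word value could match on individual tokens. A hedged illustration (the field name is hypothetical):

    // Before: author_filter:(Smith,\ John)   -- grouped clause, still tokenized
    // After:  author_filter:"Smith,\ John"   -- phrase query, the value must match as a whole
    String value = ClientUtils.escapeQueryChars("Smith, John");
    StringBuilder filterQuery = new StringBuilder("author_filter:");
    filterQuery.append("\"").append(value).append("\"");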

View File

@@ -113,9 +113,11 @@ public abstract class IndexFactoryImpl<T extends IndexableObject, S> implements
                log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)."
                             + " Only the first {} characters were indexed.", charLimit);
            } else {
+               log.error("Tika parsing error. Could not index full text.", saxe);
                throw new IOException("Tika parsing error. Could not index full text.", saxe);
            }
        } catch (TikaException ex) {
+           log.error("Tika parsing error. Could not index full text.", ex);
            throw new IOException("Tika parsing error. Could not index full text.", ex);
        }

View File

@@ -569,4 +569,9 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
    public int countTotal(Context context) throws SQLException {
        return ePersonDAO.countRows(context);
    }

+   @Override
+   public String getName(EPerson dso) {
+       return dso.getName();
+   }
}

View File

@@ -829,4 +829,9 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
                                final MetadataField metadataField) throws SQLException {
        return groupDAO.findByMetadataField(context, searchValue, metadataField);
    }

+   @Override
+   public String getName(Group dso) {
+       return dso.getName();
+   }
}

View File

@@ -6,7 +6,7 @@
 * http://www.dspace.org/license/
 */
-package org.dspace.app.rest.cache;
+package org.dspace.iiif.logger;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

View File

@@ -5,7 +5,7 @@
 *
 * http://www.dspace.org/license/
 */
-package org.dspace.app.rest.cache;
+package org.dspace.iiif.logger;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ads;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping},
 * responsible for defining the mapping of the ADS metadata fields onto the DSpace metadata fields.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class ADSFieldMapping extends AbstractMetadataFieldMapping {
/**
     * Defines which metadatum is mapped onto which metadatum. Note that while the key must be unique, it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * which metadata field is generated.
     *
     * @param metadataFieldMap The map containing the link between the retrieved metadata and the metadata that
     *                         will be set on the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "adsMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,334 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.ads;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying ADS
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<String>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String resultFieldList;
private String apiKey;
private int timeout = 1000;
@Autowired
private LiveImportClient liveImportClient;
@Override
public String getImportSource() {
return "ads";
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(id));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByIdCallable(query));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
return retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
        throw new MethodNotFoundException("This method is not implemented for ADS");
}
@Override
public void init() throws Exception {}
public String getApiKey() {
return apiKey;
}
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
/**
     * This class is a Callable implementation to get ADS entries based on a query object.
     * This Callable uses the queryString passed to the constructor as its query value.
     * If the object is constructed from a Query instance, the Query's map entry with key "query" is used instead.
     * Pagination is supported as well, using the values of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
return search(query.getParameterAsClass("query", String.class),
query.getParameterAsClass("start", Integer.class),
query.getParameterAsClass("count", Integer.class),
getApiKey());
}
}
/**
     * This class is a Callable implementation to get an ADS entry using a bibcode.
     * The bibcode can be passed to the constructor either as a String or as a Query map entry with the key "id".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
String queryString = "bibcode:" + query.getParameterAsClass("id", String.class);
            return search(queryString, 0, 1, getApiKey());
}
}
/**
* This class is a Callable implementation to search ADS entries
* using author and title and year.
* Pagination is supported too, using the value of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String author = query.getParameterAsClass("author", String.class);
String title = query.getParameterAsClass("title", String.class);
Integer year = query.getParameterAsClass("year", Integer.class);
return search(title, author, year, start, count, getApiKey());
}
}
/**
* This class is a Callable implementation to count the number of entries for an ADS query.
     * This Callable uses the queryString passed to the constructor as the query value sent to ADS.
     * If the object is constructed from a Query instance, the value of the Query's
     * map entry with the key "query" is used instead.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
return count(query.getParameterAsClass("query", String.class), getApiKey());
}
}
    private List<ImportRecord> search(String title, String author, int year, int start, int count, String token) {
        String query = "";
        if (StringUtils.isNotBlank(title)) {
            query += "title:" + title;
        }
        if (StringUtils.isNotBlank(author)) {
            String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)";
            String[] authors = author.split(splitRegex);
            // [FAU] -- append as a filter when a clause already exists, otherwise start the query
            if (StringUtils.isNotBlank(query)) {
                query += "&fq=author:";
            } else {
                query += "author:";
            }
            int x = 0;
            for (String auth : authors) {
                x++;
                query += auth;
                if (x < authors.length) {
                    query += " AND ";
                }
            }
        }
        if (year != -1) {
            // [DP] -- same pattern as the author clause above
            if (StringUtils.isNotBlank(query)) {
                query += "&fq=year:";
            } else {
                query += "year:";
            }
            query += year;
        }
        return search(query, start, count, token);
}
public Integer count(String query, String token) {
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParameters = new HashMap<String, String>();
headerParameters.put("Authorization", "Bearer " + token);
params.put(HEADER_PARAMETERS, headerParameters);
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("q", query);
uriBuilder.addParameter("rows", "1");
uriBuilder.addParameter("start", "0");
uriBuilder.addParameter("fl", this.resultFieldList);
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(resp);
return jsonNode.at("/response/numFound").asInt();
} catch (URISyntaxException e) {
            log.error(e.getMessage(), e);
}
return 0;
}
public List<ImportRecord> search(String query, Integer start, Integer count, String token) {
List<ImportRecord> adsResults = new ArrayList<>();
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParameters = new HashMap<String, String>();
headerParameters.put("Authorization", "Bearer " + token);
params.put(HEADER_PARAMETERS, headerParameters);
URIBuilder uriBuilder = new URIBuilder(this.url);
uriBuilder.addParameter("q", query);
uriBuilder.addParameter("rows", count.toString());
uriBuilder.addParameter("start", start.toString());
uriBuilder.addParameter("fl", this.resultFieldList);
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(resp);
JsonNode docs = jsonNode.at("/response/docs");
if (docs.isArray()) {
Iterator<JsonNode> nodes = docs.elements();
while (nodes.hasNext()) {
JsonNode node = nodes.next();
adsResults.add(transformSourceRecords(node.toString()));
}
} else {
adsResults.add(transformSourceRecords(docs.toString()));
}
} catch (URISyntaxException e) {
            log.error(e.getMessage(), e);
}
return adsResults;
}
private JsonNode convertStringJsonToJsonNode(String json) {
try {
return new ObjectMapper().readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return null;
}
public void setUrl(String url) {
this.url = url;
}
public void setResultFieldList(String resultFieldList) {
this.resultFieldList = resultFieldList;
}
}
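A hedged usage sketch: in a real deployment the service is a Spring bean with liveImportClient injected, and the endpoint, field list, and key shown here are assumptions:

    ADSImportMetadataSourceServiceImpl ads = new ADSImportMetadataSourceServiceImpl();
    ads.setUrl("https://api.adsabs.harvard.edu/v1/search/query");   // assumed ADS endpoint
    ads.setResultFieldList("bibcode,title,author,year");
    ads.setApiKey(System.getenv("ADS_API_KEY"));
    int total = ads.getRecordsCount("title:\"dark energy\"");
    Collection<ImportRecord> page = ads.getRecords("title:\"dark energy\"", 0, 20);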

View File

@@ -0,0 +1,67 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.crossref;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor;
/**
* This class is used for CrossRef's Live-Import to extract
* attributes such as "given" and "family" from the array of authors/editors
* and return them concatenated.
* Beans are configured in the crossref-integration.xml file.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class CrossRefAuthorMetadataProcessor implements JsonPathMetadataProcessor {
private final static Logger log = LogManager.getLogger();
private String pathToArray;
@Override
public Collection<String> processMetadata(String json) {
JsonNode rootNode = convertStringJsonToJsonNode(json);
Iterator<JsonNode> authors = rootNode.at(pathToArray).iterator();
Collection<String> values = new ArrayList<>();
while (authors.hasNext()) {
JsonNode author = authors.next();
String givenName = author.at("/given").textValue();
String familyName = author.at("/family").textValue();
if (StringUtils.isNoneBlank(givenName) && StringUtils.isNoneBlank(familyName)) {
values.add(givenName + " " + familyName);
}
}
return values;
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
public void setPathToArray(String pathToArray) {
this.pathToArray = pathToArray;
}
}
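For example, with pathToArray set to "/author" (the array used by CrossRef responses), the processor yields one "given family" string per author:

    CrossRefAuthorMetadataProcessor processor = new CrossRefAuthorMetadataProcessor();
    processor.setPathToArray("/author");
    String json = "{\"author\":[{\"given\":\"Jane\",\"family\":\"Doe\"},{\"given\":\"John\",\"family\":\"Smith\"}]}";
    Collection<String> names = processor.processMetadata(json);
    // names -> ["Jane Doe", "John Smith"]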

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.crossref;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping},
 * responsible for defining the mapping of the CrossRef metadata fields onto the DSpace metadata fields.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class CrossRefFieldMapping extends AbstractMetadataFieldMapping {
/**
     * Defines which metadatum is mapped onto which metadatum. Note that while the key must be unique, it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * which metadata field is generated.
     *
     * @param metadataFieldMap The map containing the link between the retrieved metadata and the metadata that
     *                         will be set on the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "crossrefMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,336 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.crossref;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.DoiCheck;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying CrossRef
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<String>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
@Autowired
private LiveImportClient liveImportClient;
@Override
public String getImportSource() {
return "crossref";
}
@Override
public void init() throws Exception {}
@Override
public ImportRecord getRecord(String recordId) throws MetadataSourceException {
String id = getID(recordId);
List<ImportRecord> records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id))
: retry(new SearchByIdCallable(recordId));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
String id = getID(query);
return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
String id = getID(query.toString());
return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
        String id = getID(query);
return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id))
: retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
String id = getID(query.toString());
if (StringUtils.isNotBlank(id)) {
return retry(new SearchByIdCallable(id));
}
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
String id = getID(query.toString());
List<ImportRecord> records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id))
: retry(new SearchByIdCallable(query));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
String id = getID(query.toString());
return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id))
: retry(new FindMatchingRecordCallable(query));
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for CrossRef");
}
public String getID(String id) {
return DoiCheck.isDoi(id) ? "filter=doi:" + id : StringUtils.EMPTY;
}
/**
     * This class is a Callable implementation to get CrossRef entries based on a query object.
     * This Callable uses the queryString passed to the constructor as its query value.
     * If the object is constructed from a Query instance, the Query's map entry with key "query" is used instead.
     * Pagination is supported as well, using the values of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
URIBuilder uriBuilder = new URIBuilder(url);
uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class));
if (Objects.nonNull(count)) {
uriBuilder.addParameter("rows", count.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("offset", start.toString());
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(response);
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) {
JsonNode node = nodes.next();
results.add(transformSourceRecords(node.toString()));
}
return results;
}
}
/**
     * This class is a Callable implementation to get a CrossRef entry using a DOI.
     * The DOI can be passed to the constructor either as a String or as a Query map entry, with the key "id".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByIdCallable(Query query) {
this.query = query;
}
private SearchByIdCallable(String id) {
this.query = new Query();
query.addParameter("id", id);
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8");
URIBuilder uriBuilder = new URIBuilder(url + "/" + ID);
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
JsonNode messageNode = jsonNode.at("/message");
results.add(transformSourceRecords(messageNode.toString()));
return results;
}
}
/**
* This class is a Callable implementation to search CrossRef entries using author and title.
     * There are two fields to pass in the Query map, with keys "title" and "author"
* (at least one must be used).
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class FindMatchingRecordCallable implements Callable<List<ImportRecord>> {
private Query query;
private FindMatchingRecordCallable(Query q) {
query = q;
}
@Override
public List<ImportRecord> call() throws Exception {
String queryValue = query.getParameterAsClass("query", String.class);
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
String author = query.getParameterAsClass("author", String.class);
String title = query.getParameterAsClass("title", String.class);
String bibliographics = query.getParameterAsClass("bibliographics", String.class);
List<ImportRecord> results = new ArrayList<>();
URIBuilder uriBuilder = new URIBuilder(url);
if (Objects.nonNull(queryValue)) {
uriBuilder.addParameter("query", queryValue);
}
if (Objects.nonNull(count)) {
uriBuilder.addParameter("rows", count.toString());
}
if (Objects.nonNull(start)) {
uriBuilder.addParameter("offset", start.toString());
}
if (Objects.nonNull(author)) {
uriBuilder.addParameter("query.author", author);
}
            if (Objects.nonNull(title)) {
uriBuilder.addParameter("query.container-title", title);
}
if (Objects.nonNull(bibliographics)) {
uriBuilder.addParameter("query.bibliographic", bibliographics);
}
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(resp);
Iterator<JsonNode> nodes = jsonNode.at("/message/items").iterator();
while (nodes.hasNext()) {
JsonNode node = nodes.next();
results.add(transformSourceRecords(node.toString()));
}
return results;
}
}
/**
     * This class is a Callable implementation to count the number of entries for a CrossRef query.
     * This Callable uses the queryString passed to the constructor as the query value sent to CrossRef.
     * If the object is constructed from a Query instance, the value of the Query's
     * map entry with the key "query" is used instead.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
private CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
private CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(url);
uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class));
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
return jsonNode.at("/message/total-results").asInt();
}
}
/**
     * This class is a Callable implementation to check whether a CrossRef entry exists for a given DOI.
     * The DOI can be passed to the constructor either as a String or as a Query map entry, with the key "id".
     * Returns 1 if the CrossRef entry exists, otherwise 0.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class DoiCheckCallable implements Callable<Integer> {
private final Query query;
private DoiCheckCallable(final String id) {
final Query query = new Query();
query.addParameter("id", id);
this.query = query;
}
private DoiCheckCallable(final Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class));
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode jsonNode = convertStringJsonToJsonNode(responseString);
            // use asText(): toString() on a JSON text node would include the surrounding quotes
            return StringUtils.equals(jsonNode.at("/status").asText(), "ok") ? 1 : 0;
}
}
private JsonNode convertStringJsonToJsonNode(String json) {
try {
return new ObjectMapper().readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return null;
}
public void setUrl(String url) {
this.url = url;
}
}
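A hedged call-pattern sketch; the bean is Spring-managed in practice, and the endpoint and DOI are placeholders:

    CrossRefImportMetadataSourceServiceImpl crossref = new CrossRefImportMetadataSourceServiceImpl();
    crossref.setUrl("https://api.crossref.org/works");          // assumed CrossRef endpoint
    ImportRecord byDoi = crossref.getRecord("10.1000/xyz123");  // DoiCheck routes a DOI to a by-id lookup
    int hits = crossref.getRecordsCount("machine learning");    // free text goes through ?query=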

View File

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.epo.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping},
 * responsible for defining the mapping of the EPO metadata fields onto the DSpace metadata fields.
*
* @author Pasquale Cavallo (pasquale.cavallo at 4science dot it)
*/
public class EpoFieldMapping extends AbstractMetadataFieldMapping {
/**
     * Defines which metadatum is mapped onto which metadatum. Note that while the key must be unique, it
     * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
     * which metadata field is generated.
     *
     * @param metadataFieldMap The map containing the link between the retrieved metadata and the metadata that
     *                         will be set on the item.
*/
@Override
@Resource(name = "epoMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,541 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.epo.service;
import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpException;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.xerces.impl.dv.util.Base64;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.contributor.EpoIdMetadataContributor.EpoDocumentId;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.jaxen.JaxenException;
import org.jdom2.Attribute;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.input.SAXBuilder;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying EPO
*
* @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it)
*/
public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Element>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String authUrl;
private String searchUrl;
private String consumerKey;
private String consumerSecret;
private MetadataFieldConfig dateFiled;
private MetadataFieldConfig applicationNumber;
public static final String APP_NO_DATE_SEPARATOR = "$$$";
private static final String APP_NO_DATE_SEPARATOR_REGEX = "\\$\\$\\$";
@Autowired
private LiveImportClient liveImportClient;
@Override
public void init() throws Exception {}
/**
     * The string that identifies this import implementation. Preferably a URI
*
* @return the identifying uri
*/
@Override
public String getImportSource() {
return "epo";
}
/**
     * Set the consumer EPO key
     * @param consumerKey the consumer key
*/
public void setConsumerKey(String consumerKey) {
this.consumerKey = consumerKey;
}
public String getConsumerKey() {
return consumerKey;
}
/**
     * Set the consumer EPO secret
     * @param consumerSecret the consumer secret
*/
public void setConsumerSecret(String consumerSecret) {
this.consumerSecret = consumerSecret;
}
public String getConsumerSecret() {
return consumerSecret;
}
public void setDateFiled(MetadataFieldConfig dateFiled) {
this.dateFiled = dateFiled;
}
public MetadataFieldConfig getDateFiled() {
return dateFiled;
}
public void setApplicationNumber(MetadataFieldConfig applicationNumber) {
this.applicationNumber = applicationNumber;
}
public MetadataFieldConfig getApplicationNumber() {
return applicationNumber;
}
    /**
     * Logs in to EPO using the configured consumer key and secret; the returned
     * bearer token is valid for 20 minutes.
     *
     * @return the bearer access token
     * @throws IOException
     * @throws HttpException
     */
protected String login() throws IOException, HttpException {
Map<String, Map<String, String>> params = getLoginParams();
String entity = "grant_type=client_credentials";
String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity);
ObjectMapper mapper = new ObjectMapper(new JsonFactory());
JsonNode rootNode = mapper.readTree(json);
JsonNode accessTokenNode = rootNode.get("access_token");
        return accessTokenNode.asText();
    }
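    // Example token exchange performed by login() (the response shape is an assumption,
    // following the standard OAuth2 client-credentials flow):
    //   POST {authUrl}
    //     Authorization: Basic base64(consumerKey + ":" + consumerSecret)
    //     Content-type: application/x-www-form-urlencoded
    //     grant_type=client_credentials
    //   -> {"access_token": "<bearer>", ...}   // usable for about 20 minutes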
}
private Map<String, Map<String, String>> getLoginParams() {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParams = getLoginHeaderParams();
params.put(HEADER_PARAMETERS, headerParams);
return params;
}
private Map<String, String> getLoginHeaderParams() {
Map<String, String> params = new HashMap<String, String>();
String authString = consumerKey + ":" + consumerSecret;
params.put("Authorization", "Basic " + Base64.encode(authString.getBytes()));
params.put("Content-type", "application/x-www-form-urlencoded");
return params;
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
try {
String bearer = login();
return retry(new CountRecordsCallable(query, bearer));
} catch (IOException | HttpException e) {
log.warn(e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
}
return 0;
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
try {
String bearer = login();
return retry(new CountRecordsCallable(query, bearer));
} catch (IOException | HttpException e) {
                log.error(e.getMessage(), e);
}
}
return 0;
}
@Override
public Collection<ImportRecord> getRecords(String query, int start,
int count) throws MetadataSourceException {
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
try {
String bearer = login();
return retry(new SearchByQueryCallable(query, bearer, start, count));
} catch (IOException | HttpException e) {
log.warn(e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
}
return new ArrayList<ImportRecord>();
}
@Override
public Collection<ImportRecord> getRecords(Query query)
throws MetadataSourceException {
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
try {
String bearer = login();
return retry(new SearchByQueryCallable(query, bearer));
} catch (IOException | HttpException e) {
log.warn(e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
}
return new ArrayList<ImportRecord>();
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
try {
String bearer = login();
List<ImportRecord> list = retry(new SearchByIdCallable(id, bearer));
return CollectionUtils.isNotEmpty(list) ? list.get(0) : null;
} catch (IOException | HttpException e) {
log.warn(e.getMessage());
throw new RuntimeException(e.getMessage(), e);
}
}
return null;
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
return null;
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item)
throws MetadataSourceException {
return null;
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query)
throws MetadataSourceException {
return null;
}
/**
* This class is a Callable implementation to count the number of entries for an EPO query.
     * This Callable uses the queryString passed to the constructor as the query value sent to EPO.
     * If the object is constructed from a Query instance, the value of the Query's
     * map entry with the key "query" is used instead.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class CountRecordsCallable implements Callable<Integer> {
private String bearer;
private String query;
private CountRecordsCallable(Query query, String bearer) {
this.query = query.getParameterAsClass("query", String.class);
this.bearer = bearer;
}
private CountRecordsCallable(String query, String bearer) {
this.query = query;
this.bearer = bearer;
}
public Integer call() throws Exception {
return countDocument(bearer, query);
}
}
/**
     * This class is a Callable implementation to get an EPO entry using an epodoc ID (e.g. epodoc:AB1234567T).
     * The epodoc ID can be passed to the constructor either as a String or as a Query map entry, with the key "id".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByIdCallable implements Callable<List<ImportRecord>> {
private String id;
private String bearer;
private SearchByIdCallable(String id, String bearer) {
this.id = id;
this.bearer = bearer;
}
public List<ImportRecord> call() throws Exception {
int positionToSplit = id.indexOf(":");
String docType = EpoDocumentId.EPODOC;
String idS = id;
if (positionToSplit != -1) {
docType = id.substring(0, positionToSplit);
idS = id.substring(positionToSplit + 1, id.length());
} else if (id.contains(APP_NO_DATE_SEPARATOR)) {
                // special case: the id is the combination of the application number and the date filed
String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0];
SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10);
List<ImportRecord> records = search.call().stream()
.filter(r -> r.getValue(dateFiled.getSchema(), dateFiled.getElement(),
dateFiled.getQualifier())
.stream()
.anyMatch(m -> StringUtils.equals(m.getValue(),
id.split(APP_NO_DATE_SEPARATOR_REGEX)[1])
))
.limit(1).collect(Collectors.toList());
return records;
}
List<ImportRecord> records = searchDocument(bearer, idS, docType);
if (records.size() > 1) {
log.warn("More record are returned with epocID " + id);
}
return records;
}
}
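    // Example of the composite-id special case handled by SearchByIdCallable above
    // (values hypothetical): an id of "EP20050012345$$$2005-06-01" is split on
    // APP_NO_DATE_SEPARATOR into the query "applicationnumber=EP20050012345", and the
    // results are then narrowed to the record whose dateFiled metadata equals "2005-06-01".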
/**
     * This class is a Callable implementation to get EPO entries based on a query object.
     * This Callable uses the queryString passed to the constructor as its query value.
     * If the object is constructed from a Query instance, the Query's map entry with key "query" is used instead.
     * Pagination is supported as well, using the values of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private Integer start;
private Integer count;
private String bearer;
private SearchByQueryCallable(Query query, String bearer) {
this.query = query;
this.bearer = bearer;
}
        public SearchByQueryCallable(String queryValue, String bearer, int start, int count) {
            this.query = new Query();
            query.addParameter("query", queryValue);
            // use the explicit arguments: the Query built above carries only the query string,
            // so reading "start"/"count" back from it would always yield the defaults
            this.start = start;
            this.count = count;
            this.bearer = bearer;
        }
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> records = new ArrayList<ImportRecord>();
String queryString = query.getParameterAsClass("query", String.class);
if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) {
if (StringUtils.isNotBlank(queryString) && StringUtils.isNotBlank(bearer)) {
List<EpoDocumentId> epoDocIds = searchDocumentIds(bearer, queryString, start + 1, count);
for (EpoDocumentId epoDocId : epoDocIds) {
List<ImportRecord> recordfounds = searchDocument(bearer, epoDocId);
if (recordfounds.size() > 1) {
log.warn("More record are returned with epocID " + epoDocId.toString());
}
records.addAll(recordfounds);
}
}
}
return records;
}
}
private Integer countDocument(String bearer, String query) {
if (StringUtils.isBlank(bearer)) {
return null;
}
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParameters = new HashMap<String, String>();
headerParameters.put("Authorization", "Bearer " + bearer);
headerParameters.put("X-OPS-Range", "1-1");
params.put(HEADER_PARAMETERS, headerParameters);
URIBuilder uriBuilder = new URIBuilder(this.searchUrl);
uriBuilder.addParameter("q", query);
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
Namespace.getNamespace("ops", "http://ops.epo.org"),
Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
String totalRes = getElement(root, namespaces, "//ops:biblio-search/@total-result-count");
return Integer.parseInt(totalRes);
} catch (JDOMException | IOException | URISyntaxException | JaxenException e) {
log.error(e.getMessage(), e);
return null;
}
}
private List<EpoDocumentId> searchDocumentIds(String bearer, String query, int start, int count) {
List<EpoDocumentId> results = new ArrayList<EpoDocumentId>();
int end = start + count;
if (StringUtils.isBlank(bearer)) {
return results;
}
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParameters = new HashMap<String, String>();
headerParameters.put("Authorization", "Bearer " + bearer);
if (start >= 1 && end > start) {
headerParameters.put("X-OPS-Range", start + "-" + end);
}
params.put(HEADER_PARAMETERS, headerParameters);
URIBuilder uriBuilder = new URIBuilder(this.searchUrl);
uriBuilder.addParameter("q", query);
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(response));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
Namespace.getNamespace("ops", "http://ops.epo.org"),
Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
XPathExpression<Element> xpath = XPathFactory.instance()
.compile("//ns:document-id", Filters.element(), null, namespaces);
List<Element> documentIds = xpath.evaluate(root);
for (Element documentId : documentIds) {
results.add(new EpoDocumentId(documentId, namespaces));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return results;
}
private List<ImportRecord> searchDocument(String bearer, EpoDocumentId id) {
return searchDocument(bearer, id.getId(), id.getDocumentIdType());
}
private List<ImportRecord> searchDocument(String bearer, String id, String docType) {
List<ImportRecord> results = new ArrayList<ImportRecord>();
if (StringUtils.isBlank(bearer)) {
return results;
}
try {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
Map<String, String> headerParameters = new HashMap<String, String>();
headerParameters.put("Authorization", "Bearer " + bearer);
params.put(HEADER_PARAMETERS, headerParameters);
String url = this.url.replace("$(doctype)", docType).replace("$(id)", id);
String response = liveImportClient.executeHttpGetRequest(1000, url, params);
List<Element> elements = splitToRecords(response);
for (Element element : elements) {
results.add(transformSourceRecords(element));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
return results;
}
private List<Element> splitToRecords(String recordsSrc) {
try {
SAXBuilder saxBuilder = new SAXBuilder();
Document document = saxBuilder.build(new StringReader(recordsSrc));
Element root = document.getRootElement();
List<Namespace> namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
XPathExpression<Element> xpath = XPathFactory.instance().compile("//ns:exchange-document",
Filters.element(), null, namespaces);
List<Element> recordsList = xpath.evaluate(root);
return recordsList;
} catch (JDOMException | IOException e) {
log.error(e.getMessage(), e);
return new LinkedList<Element>();
}
}
private String getElement(Element document, List<Namespace> namespaces, String path) throws JaxenException {
XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(document);
//exactly one element expected for any field
if (CollectionUtils.isEmpty(nodes)) {
return StringUtils.EMPTY;
} else {
return getValue(nodes.get(0));
}
}
private String getValue(Object el) {
if (el instanceof Element) {
return ((Element) el).getText();
} else if (el instanceof Attribute) {
return ((Attribute) el).getValue();
} else if (el instanceof String) {
return (String)el;
} else if (el instanceof Text) {
return ((Text) el).getText();
} else {
log.error("node of type: " + el.getClass());
return "";
}
}
public void setUrl(String url) {
this.url = url;
}
public void setAuthUrl(String authUrl) {
this.authUrl = authUrl;
}
public void setSearchUrl(String searchUrl) {
this.searchUrl = searchUrl;
}
}
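A short sketch of how SearchByIdCallable resolves the two ID shapes described in its javadoc; the value is the javadoc's example and the variables are local illustrations, not fields of the class:

String id = "epodoc:AB1234567T";
int positionToSplit = id.indexOf(":");
// "epodoc:AB1234567T" -> docType = "epodoc", idS = "AB1234567T"
// "AB1234567T" (no colon) -> docType defaults to EpoDocumentId.EPODOC
String docType = positionToSplit != -1 ? id.substring(0, positionToSplit) : EpoDocumentId.EPODOC;
String idS = positionToSplit != -1 ? id.substring(positionToSplit + 1) : id;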

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.liveimportclient.service;
import java.util.Map;
/**
* Interface for classes that allow to contact LiveImport clients.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public interface LiveImportClient {
/**
* Http GET request
*
* @param timeout The connect timeout in milliseconds
 * @param URL the URL to contact
 * @param params map of parameter maps, keyed by kind: "uriParameters" are appended
 *               to the URL as key=value pairs, "headerParameters" become request headers
 * @return the response body converted from an InputStream to a String
*/
public String executeHttpGetRequest(int timeout, String URL, Map<String, Map<String, String>> params);
/**
* Http POST request
*
 * @param URL the URL to contact
 * @param params map of parameter maps, keyed by kind (see executeHttpGetRequest)
 * @param entry the request body (entity) value
 * @return the response body converted from an InputStream to a String
*/
public String executeHttpPostRequest(String URL, Map<String, Map<String, String>> params, String entry);
}
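A minimal usage sketch for this interface, assuming an injected LiveImportClient bean and the map keys defined by the implementation below ("headerParameters", "uriParameters"); the URL and token are placeholders:

String token = "..."; // hypothetical access token
Map<String, String> headers = new HashMap<>();
headers.put("Authorization", "Bearer " + token);
Map<String, Map<String, String>> params = new HashMap<>();
params.put("headerParameters", headers);
String body = liveImportClient.executeHttpGetRequest(1000, "https://api.example.org/search", params);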

View File

@@ -0,0 +1,187 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.liveimportclient.service;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.config.RequestConfig.Builder;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation of {@link LiveImportClient}.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com)
*/
public class LiveImportClientImpl implements LiveImportClient {
private final static Logger log = LogManager.getLogger();
public static final String URI_PARAMETERS = "uriParameters";
public static final String HEADER_PARAMETERS = "headerParameters";
private CloseableHttpClient httpClient;
@Autowired
private ConfigurationService configurationService;
@Override
public String executeHttpGetRequest(int timeout, String URL, Map<String, Map<String, String>> params) {
HttpGet method = null;
try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
.orElseGet(HttpClients::createDefault)) {
Builder requestConfigBuilder = RequestConfig.custom();
requestConfigBuilder.setConnectionRequestTimeout(timeout);
// also honour the documented connect timeout, not only the connection-pool request timeout
requestConfigBuilder.setConnectTimeout(timeout);
RequestConfig defaultRequestConfig = requestConfigBuilder.build();
method = new HttpGet(buildUrl(URL, params.get(URI_PARAMETERS)));
method.setConfig(defaultRequestConfig);
Map<String, String> headerParams = params.get(HEADER_PARAMETERS);
if (MapUtils.isNotEmpty(headerParams)) {
for (String param : headerParams.keySet()) {
method.setHeader(param, headerParams.get(param));
}
}
configureProxy(method, defaultRequestConfig);
HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessful(httpResponse)) {
throw new RuntimeException("HTTP GET request failed with status code " + getStatusCode(httpResponse));
}
InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset());
} catch (Exception e1) {
log.error(e1.getMessage(), e1);
} finally {
if (Objects.nonNull(method)) {
method.releaseConnection();
}
}
return StringUtils.EMPTY;
}
@Override
public String executeHttpPostRequest(String URL, Map<String, Map<String, String>> params, String entry) {
HttpPost method = null;
try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient)
.orElseGet(HttpClients::createDefault)) {
Builder requestConfigBuilder = RequestConfig.custom();
RequestConfig defaultRequestConfig = requestConfigBuilder.build();
method = new HttpPost(buildUrl(URL, params.get(URI_PARAMETERS)));
method.setConfig(defaultRequestConfig);
if (StringUtils.isNotBlank(entry)) {
method.setEntity(new StringEntity(entry));
}
setHeaderParams(method, params);
configureProxy(method, defaultRequestConfig);
HttpResponse httpResponse = httpClient.execute(method);
if (isNotSuccessful(httpResponse)) {
throw new RuntimeException("HTTP POST request failed with status code " + getStatusCode(httpResponse));
}
InputStream inputStream = httpResponse.getEntity().getContent();
return IOUtils.toString(inputStream, Charset.defaultCharset());
} catch (Exception e1) {
log.error(e1.getMessage(), e1);
} finally {
if (Objects.nonNull(method)) {
method.releaseConnection();
}
}
return StringUtils.EMPTY;
}
private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) {
String proxyHost = configurationService.getProperty("http.proxy.host");
String proxyPort = configurationService.getProperty("http.proxy.port");
if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) {
RequestConfig requestConfig = RequestConfig.copy(defaultRequestConfig)
.setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http"))
.build();
method.setConfig(requestConfig);
}
}
/**
 * Sets the header parameters on the HTTP POST method
*
* @param method HttpPost method
* @param params This map contains the header params to be included in the request.
*/
private void setHeaderParams(HttpPost method, Map<String, Map<String, String>> params) {
Map<String, String> headerParams = params.get(HEADER_PARAMETERS);
if (MapUtils.isNotEmpty(headerParams)) {
for (String param : headerParams.keySet()) {
method.setHeader(param, headerParams.get(param));
}
}
}
/**
 * This method adds the parameters contained in the requestParams map to the URL
 *
 * @param URL the base URL
 * @param requestParams map of parameters to add to the URL,
 *                      each as a key=value query parameter
 * @return the URL with the request parameters appended
* @throws URISyntaxException
*/
private String buildUrl(String URL, Map<String, String> requestParams) throws URISyntaxException {
URIBuilder uriBuilder = new URIBuilder(URL);
if (MapUtils.isNotEmpty(requestParams)) {
for (String param : requestParams.keySet()) {
uriBuilder.setParameter(param, requestParams.get(param));
}
}
return uriBuilder.toString();
}
private boolean isNotSuccessful(HttpResponse response) {
int statusCode = getStatusCode(response);
return statusCode < 200 || statusCode > 299;
}
private int getStatusCode(HttpResponse response) {
return response.getStatusLine().getStatusCode();
}
public CloseableHttpClient getHttpClient() {
return httpClient;
}
public void setHttpClient(CloseableHttpClient httpClient) {
this.httpClient = httpClient;
}
}
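configureProxy() is driven entirely by two configuration properties; a minimal sketch of the expected entries (host and port values are placeholders):

http.proxy.host = proxy.example.org
http.proxy.port = 8080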

View File

@@ -0,0 +1,82 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * This processor extracts the values of a given attribute from a JSON array.
 * For example, to extract all values of secondAttribute from
* "array":[
* {
* "firstAttribute":"first value",
* "secondAttribute":"second value"
* },
* {
* "firstAttribute":"first value",
* "secondAttribute":"second value"
* }
* ]
*
 * it is possible to configure a bean with
* pathToArray=/array and elementAttribute=/secondAttribute
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class ArrayElementAttributeProcessor implements JsonPathMetadataProcessor {
private final static Logger log = LogManager.getLogger();
private String pathToArray;
private String elementAttribute;
@Override
public Collection<String> processMetadata(String json) {
JsonNode rootNode = convertStringJsonToJsonNode(json);
Iterator<JsonNode> array = rootNode.at(pathToArray).iterator();
Collection<String> values = new ArrayList<>();
while (array.hasNext()) {
JsonNode element = array.next();
String value = element.at(elementAttribute).textValue();
if (StringUtils.isNotBlank(value)) {
values.add(value);
}
}
return values;
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
public void setPathToArray(String pathToArray) {
this.pathToArray = pathToArray;
}
public void setElementAttribute(String elementAttribute) {
this.elementAttribute = elementAttribute;
}
}
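A usage sketch matching the javadoc's example JSON, assuming the processor is instantiated directly rather than wired as a Spring bean; json is the (hypothetical) full response body containing the "array" field:

ArrayElementAttributeProcessor processor = new ArrayElementAttributeProcessor();
processor.setPathToArray("/array");
processor.setElementAttribute("/secondAttribute");
Collection<String> values = processor.processMetadata(json); // ["second value", "second value"]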

View File

@@ -0,0 +1,312 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jaxen.JaxenException;
import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
import org.springframework.beans.factory.annotation.Required;
/**
 * Custom MetadataContributor to manage EPO IDs.
 * Needs as input a <publication-reference> element and all its children.
*
* @author Pasquale Cavallo
*/
public class EpoIdMetadataContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field;
private boolean needType;
/**
 * This property will be used when building the ID.
 * If this is true, the id will be in the form docType:EpoID; otherwise only the EpoID will be returned.
 *
 * @param needType if true, the docType will be included in the id
*/
public void setNeedType(boolean needType) {
this.needType = needType;
}
/**
* Return prefixToNamespaceMapping
*
* @return a prefixToNamespaceMapping map
*/
public Map<String, String> getPrefixToNamespaceMapping() {
return prefixToNamespaceMapping;
}
protected MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
/**
* Return metadataFieldMapping
*
* @return MetadataFieldMapping
*/
public MetadataFieldMapping<Element, MetadataContributor<Element>> getMetadataFieldMapping() {
return metadataFieldMapping;
}
/**
 * Set the metadataFieldMapping of this EpoIdMetadataContributor
*
* @param metadataFieldMapping the new mapping.
*/
public void setMetadataFieldMapping(
MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping;
}
/**
 * Set the prefixToNamespaceMapping for this object.
*
* @param prefixToNamespaceMapping the new mapping.
*/
@Resource(name = "isiFullprefixMapping")
public void setPrefixToNamespaceMapping(Map<String, String> prefixToNamespaceMapping) {
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
}
protected Map<String, String> prefixToNamespaceMapping;
/**
* Initialize EpoIdMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig
*
* @param query query string
* @param prefixToNamespaceMapping metadata prefix to namespace mapping
* @param field
* <a href="https://github.com/DSpace/DSpace/tree/master/dspace-api/src/main/java/org/dspace/importer/external#metadata-mapping-">MetadataFieldConfig</a>
*/
public EpoIdMetadataContributor(String query, Map<String, String> prefixToNamespaceMapping,
MetadataFieldConfig field) {
this.query = query;
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
this.field = field;
}
/**
* Empty constructor for EpoIdMetadataContributor
*/
public EpoIdMetadataContributor() {
}
protected String query;
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
*
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() {
return field;
}
/**
* Setting the MetadataFieldConfig
*
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
@Required
public void setField(MetadataFieldConfig field) {
this.field = field;
}
/**
 * Return the query used to create the XPath expression.
*
* @return the query this instance is based on
*/
public String getQuery() {
return query;
}
@Required
public void setQuery(String query) {
this.query = query;
}
/**
 * Retrieve the metadata associated with the given element.
 * Depending on the node retrieved by the query, different types of values will be added to the MetadatumDTO
 * list
 *
 * @param element the Element to retrieve metadata from
 * @return a collection of MetadatumDTO built from the matching document IDs
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
try {
List<Namespace> namespaces = Arrays.asList(
Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"),
Namespace.getNamespace("ops", "http://ops.epo.org"),
Namespace.getNamespace("ns", "http://www.epo.org/exchange"));
XPathExpression<Element> xpath = XPathFactory.instance().compile(query, Filters.element(), null,
namespaces);
List<Element> elements = xpath.evaluate(element);
for (Element el : elements) {
EpoDocumentId document = new EpoDocumentId(el, namespaces);
MetadatumDTO metadatum = new MetadatumDTO();
metadatum.setElement(field.getElement());
metadatum.setQualifier(field.getQualifier());
metadatum.setSchema(field.getSchema());
if (needType) {
metadatum.setValue(document.getIdAndType());
} else {
metadatum.setValue(document.getId());
}
values.add(metadatum);
}
return values;
} catch (JaxenException e) {
throw new RuntimeException("Error evaluating XPath query: " + query, e);
}
}
/**
 * This class models the parts of EPO's response metadata that are needed to extract the EPO ID.
*
* @author Pasquale Cavallo
*
*/
public static class EpoDocumentId {
private String documentIdType;
private String country;
private String docNumber;
private String kind;
private String date;
private List<Namespace> namespaces;
public static final String DOCDB = "docdb";
public static final String EPODOC = "epodoc";
public static final String ORIGIN = "origin";
public EpoDocumentId(Element documentId, List<Namespace> namespaces) throws JaxenException {
this.namespaces = namespaces;
Element preferredId = null;
XPathExpression<Object> xpath = XPathFactory.instance().compile(
"./ns:document-id[@document-id-type=\"epodoc\"]", Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(documentId);
if (CollectionUtils.isNotEmpty(nodes)) {
preferredId = (Element) nodes.get(0);
}
if (Objects.isNull(preferredId)) {
preferredId = documentId;
}
this.documentIdType = buildDocumentIdType(preferredId);
this.country = buildCountry(preferredId);
this.docNumber = buildDocNumber(preferredId);
this.kind = buildKind(preferredId);
this.date = buildDate(preferredId);
}
private String buildDocumentIdType(Element documentId) throws JaxenException {
return getElement(documentId, "./@document-id-type");
}
private String buildCountry(Element documentId) throws JaxenException {
return getElement(documentId, "./ns:country");
}
private String buildDocNumber(Element documentId) throws JaxenException {
return getElement(documentId, "./ns:doc-number");
}
private String buildKind(Element documentId) throws JaxenException {
return getElement(documentId, "./ns:kind");
}
private String buildDate(Element documentId) throws JaxenException {
return getElement(documentId, "./ns:date");
}
public String getDocumentIdType() {
return documentIdType;
}
/**
 * This method computes the EPO ID from its fields
*
* @return the EPO id
*/
public String getId() {
if (DOCDB.equals(documentIdType)) {
return country + "." + docNumber + "." + kind;
} else if (EPODOC.equals(documentIdType)) {
return docNumber + ((kind != null) ? kind : StringUtils.EMPTY);
} else {
return StringUtils.EMPTY;
}
}
public String getIdAndType() {
if (EPODOC.equals(documentIdType)) {
return documentIdType + ":" + docNumber + ((kind != null) ? kind : "");
} else if (DOCDB.equals(documentIdType)) {
return documentIdType + ":" + country + "." + docNumber + "." + kind;
} else {
return StringUtils.EMPTY;
}
}
private String getElement(Element documentId, String path) throws JaxenException {
if (Objects.isNull(documentId)) {
return StringUtils.EMPTY;
}
XPathExpression<Object> xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null,
namespaces);
List<Object> nodes = xpath.evaluate(documentId);
//exactly one element expected for any field
return CollectionUtils.isNotEmpty(nodes) ? getValue(nodes.get(0)) : StringUtils.EMPTY;
}
private String getValue(Object el) {
if (el instanceof Element) {
return ((Element) el).getText();
} else if (el instanceof Attribute) {
return ((Attribute) el).getValue();
} else if (el instanceof String) {
return (String)el;
} else if (el instanceof Text) {
return ((Text) el).getText();
} else {
return StringUtils.EMPTY;
}
}
}
}
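A sketch of the two ID shapes produced by EpoDocumentId, using hypothetical field values:

// docdb:  country = "EP", docNumber = "1000000", kind = "A1"
//   getId()        -> "EP.1000000.A1"
//   getIdAndType() -> "docdb:EP.1000000.A1"
// epodoc: docNumber = "EP1000000", kind = "A1"
//   getId()        -> "EP1000000A1"
//   getIdAndType() -> "epodoc:EP1000000A1"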

View File

@@ -0,0 +1,23 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
/**
 * Service interface for processing a JSON object.
 * Implementations of this interface are responsible for the business logic
 * used to extract values from the JSON object.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public interface JsonPathMetadataProcessor {
public Collection<String> processMetadata(String json);
}

View File

@@ -0,0 +1,87 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * This processor extracts all values of a matrix (an array of arrays).
 * Only the path to the matrix needs to be configured, in "pathToMatrix".
 * For example, to extract all values of
* "matrix": [
* [
* "first",
* "second"
* ],
* [
* "third"
* ],
* [
* "fourth",
* "fifth"
* ]
* ],
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class MatrixElementProcessor implements JsonPathMetadataProcessor {
private final static Logger log = LogManager.getLogger();
private String pathToMatrix;
@Override
public Collection<String> processMetadata(String json) {
JsonNode rootNode = convertStringJsonToJsonNode(json);
Iterator<JsonNode> array = rootNode.at(pathToMatrix).elements();
Collection<String> values = new ArrayList<>();
while (array.hasNext()) {
JsonNode element = array.next();
if (element.isArray()) {
Iterator<JsonNode> nodes = element.iterator();
while (nodes.hasNext()) {
String nodeValue = nodes.next().textValue();
if (StringUtils.isNotBlank(nodeValue)) {
values.add(nodeValue);
}
}
} else {
String nodeValue = element.textValue();
if (StringUtils.isNotBlank(nodeValue)) {
values.add(nodeValue);
}
}
}
return values;
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
public void setPathToMatrix(String pathToMatrix) {
this.pathToMatrix = pathToMatrix;
}
}
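A usage sketch against the javadoc's matrix example; json is the (hypothetical) full response body containing the "matrix" field:

MatrixElementProcessor processor = new MatrixElementProcessor();
processor.setPathToMatrix("/matrix");
Collection<String> values = processor.processMetadata(json); // ["first", "second", "third", "fourth", "fifth"]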

View File

@@ -0,0 +1,181 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Objects;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
 * A simple JsonPath metadata contributor
 * that extracts values from a JSON object
 * by configuring the path in the query variable via the bean.
 * Moreover, more elaborate extractions can be performed
 * by configuring a specific JSON processor in "metadataProcessor"
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class SimpleJsonPathMetadataContributor implements MetadataContributor<String> {
private final static Logger log = LogManager.getLogger();
private String query;
private MetadataFieldConfig field;
protected JsonPathMetadataProcessor metadataProcessor;
/**
 * Initialize SimpleJsonPathMetadataContributor with a query and a MetadataFieldConfig
 *
 * @param query The JsonPath query
 * @param field the metadata field to map the result of the JsonPath query to
* <a href="https://github.com/DSpace/DSpace/tree/master/dspace-api/src/main/java/org/dspace/importer/external#metadata-mapping-">MetadataFieldConfig</a>
*/
public SimpleJsonPathMetadataContributor(String query, MetadataFieldConfig field) {
this.query = query;
this.field = field;
}
/**
* Unused by this implementation
*/
@Override
public void setMetadataFieldMapping(MetadataFieldMapping<String, MetadataContributor<String>> rt) {
}
/**
* Empty constructor for SimpleJsonPathMetadataContributor
*/
public SimpleJsonPathMetadataContributor() {
}
/**
* Return the MetadataFieldConfig used while retrieving MetadatumDTO
*
* @return MetadataFieldConfig
*/
public MetadataFieldConfig getField() {
return field;
}
/**
* Setting the MetadataFieldConfig
*
* @param field MetadataFieldConfig used while retrieving MetadatumDTO
*/
public void setField(MetadataFieldConfig field) {
this.field = field;
}
/**
 * Return the query used to create the JsonPath
*
* @return the query this instance is based on
*/
public String getQuery() {
return query;
}
/**
 * Set the query used to create the JsonPath
*
*/
public void setQuery(String query) {
this.query = query;
}
/**
 * Used to post-process the data obtained by the JsonPath expression, e.g. to stringify arrays or change date formats.
 * If it is null, the value is extracted directly from the node.
*
* @param metadataProcessor
*/
public void setMetadataProcessor(JsonPathMetadataProcessor metadataProcessor) {
this.metadataProcessor = metadataProcessor;
}
/**
 * Retrieve the metadata associated with the given JSON string.
 * The textual value of each matching node will be used.
 *
 * @param fullJson the JSON response to extract metadata from
 * @return a collection of MetadatumDTO built from the extracted values
*/
@Override
public Collection<MetadatumDTO> contributeMetadata(String fullJson) {
Collection<MetadatumDTO> metadata = new ArrayList<>();
Collection<String> metadataValue = new ArrayList<>();
if (Objects.nonNull(metadataProcessor)) {
metadataValue = metadataProcessor.processMetadata(fullJson);
} else {
JsonNode jsonNode = convertStringJsonToJsonNode(fullJson);
JsonNode node = jsonNode.at(query);
if (node.isArray()) {
Iterator<JsonNode> nodes = node.iterator();
while (nodes.hasNext()) {
String nodeValue = getStringValue(nodes.next());
if (StringUtils.isNotBlank(nodeValue)) {
metadataValue.add(nodeValue);
}
}
} else if (!node.isNull() && StringUtils.isNotBlank(node.toString())) {
String nodeValue = getStringValue(node);
if (StringUtils.isNotBlank(nodeValue)) {
metadataValue.add(nodeValue);
}
}
}
for (String value : metadataValue) {
MetadatumDTO metadatumDto = new MetadatumDTO();
metadatumDto.setValue(value);
metadatumDto.setElement(field.getElement());
metadatumDto.setQualifier(field.getQualifier());
metadatumDto.setSchema(field.getSchema());
metadata.add(metadatumDto);
}
return metadata;
}
private String getStringValue(JsonNode node) {
if (node.isTextual()) {
return node.textValue();
}
if (node.isNumber()) {
return node.numberValue().toString();
}
log.error("It wasn't possible to convert the value of the following JsonNode:" + node.asText());
return StringUtils.EMPTY;
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
}
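Note that the query is evaluated with Jackson's JsonNode.at(), so it is a JSON Pointer expression. A minimal sketch assuming a hypothetical response and MetadataFieldConfig's (schema, element, qualifier) constructor:

String fullJson = "{\"title\":\"Some title\"}";
SimpleJsonPathMetadataContributor contributor =
    new SimpleJsonPathMetadataContributor("/title", new MetadataFieldConfig("dc", "title", null));
Collection<MetadatumDTO> metadata = contributor.contributeMetadata(fullJson); // one dc.title metadatum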

View File

@@ -0,0 +1,59 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
 * This contributor extends SimpleRisToMetadataContributor;
 * in particular, it is able to concatenate multiple values into a single one
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
*/
public class SimpleRisToMetadataConcatContributor extends SimpleRisToMetadataContributor {
private String tag;
private MetadataFieldConfig metadata;
@Override
public Collection<MetadatumDTO> contributeMetadata(Map<String, List<String>> record) {
List<MetadatumDTO> values = new LinkedList<>();
List<String> fieldValues = record.get(this.tag);
Optional.ofNullable(fieldValues)
.map(fv -> fv.stream())
.map(s -> s.collect(Collectors.joining(" ")))
.ifPresent(t -> values.add(this.metadataFieldMapping.toDCValue(this.metadata, t)));
return values;
}
public String getTag() {
return tag;
}
public void setTag(String tag) {
this.tag = tag;
}
public MetadataFieldConfig getMetadata() {
return metadata;
}
public void setMetadata(MetadataFieldConfig metadata) {
this.metadata = metadata;
}
}
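A sketch of the concatenation behaviour, with hypothetical wiring (the inherited metadataFieldMapping must also be injected, since toDCValue() is called on it):

SimpleRisToMetadataConcatContributor contributor = new SimpleRisToMetadataConcatContributor();
contributor.setTag("AB"); // hypothetical RIS tag carrying a multi-line abstract
contributor.setMetadata(new MetadataFieldConfig("dc", "description", "abstract"));
// given record.get("AB") == ["First part.", "Second part."], contributeMetadata(record)
// yields a single dc.description.abstract value "First part. Second part."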

View File

@@ -0,0 +1,71 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadataFieldMapping;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
/**
* Metadata contributor that takes a record defined as Map<String,List<String>>
 * and turns it into the metadata configured in fieldToMetadata
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it)
*/
public class SimpleRisToMetadataContributor implements MetadataContributor<Map<String,List<String>>> {
protected Map<String, MetadataFieldConfig> fieldToMetadata;
protected MetadataFieldMapping<Map<String,List<String>>,
MetadataContributor<Map<String,List<String>>>> metadataFieldMapping;
public SimpleRisToMetadataContributor() {}
public SimpleRisToMetadataContributor(Map<String, MetadataFieldConfig> fieldToMetadata) {
this.fieldToMetadata = fieldToMetadata;
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Map<String, List<String>> record) {
List<MetadatumDTO> values = new LinkedList<>();
for (String field : fieldToMetadata.keySet()) {
List<String> fieldValues = record.get(field);
if (Objects.nonNull(fieldValues)) {
for (String value : fieldValues) {
values.add(metadataFieldMapping.toDCValue(fieldToMetadata.get(field), value));
}
}
}
return values;
}
public Map<String, MetadataFieldConfig> getFieldToMetadata() {
return fieldToMetadata;
}
public void setFieldToMetadata(Map<String, MetadataFieldConfig> fieldToMetadata) {
this.fieldToMetadata = fieldToMetadata;
}
public MetadataFieldMapping<Map<String, List<String>>,
MetadataContributor<Map<String, List<String>>>> getMetadataFieldMapping() {
return metadataFieldMapping;
}
public void setMetadataFieldMapping(MetadataFieldMapping<Map<String, List<String>>,
MetadataContributor<Map<String, List<String>>>> metadataFieldMapping) {
this.metadataFieldMapping = metadataFieldMapping;
}
}
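A sketch of the per-tag mapping, with a hypothetical RIS tag and target field (metadataFieldMapping must be injected as in the Spring wiring):

Map<String, MetadataFieldConfig> fieldToMetadata = new HashMap<>();
fieldToMetadata.put("TI", new MetadataFieldConfig("dc", "title", null));
SimpleRisToMetadataContributor contributor = new SimpleRisToMetadataContributor(fieldToMetadata);
// every value under record.get("TI") becomes one dc.title metadatum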

View File

@@ -0,0 +1,91 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.metadatamapping.contributor;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.dspace.importer.external.metadatamapping.MetadataFieldConfig;
import org.dspace.importer.external.metadatamapping.MetadatumDTO;
import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.Text;
import org.jdom2.filter.Filters;
import org.jdom2.xpath.XPathExpression;
import org.jdom2.xpath.XPathFactory;
/**
* This contributor can be used when parsing an XML file,
 * in particular to extract a date and convert it to a specific format.
 * The format to read should be configured in the variable dateFormatFrom,
 * and the format to produce in the variable dateFormatTo.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor {
private DateFormat dateFormatFrom;
private DateFormat dateFormatTo;
public void setDateFormatFrom(String dateFormatFrom) {
this.dateFormatFrom = new SimpleDateFormat(dateFormatFrom);
}
public void setDateFormatTo(String dateFormatTo) {
this.dateFormatTo = new SimpleDateFormat(dateFormatTo);
}
@Override
public Collection<MetadatumDTO> contributeMetadata(Element element) {
List<MetadatumDTO> values = new LinkedList<>();
List<Namespace> namespaces = new ArrayList<Namespace>();
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath = XPathFactory.instance()
.compile(query,Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(element);
for (Object el : nodes) {
if (el instanceof Element) {
values.add(getMetadatum(field, ((Element) el).getText()));
} else if (el instanceof Attribute) {
values.add(getMetadatum(field, ((Attribute) el).getValue()));
} else if (el instanceof String) {
values.add(getMetadatum(field, (String) el));
} else if (el instanceof Text) {
values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText()));
} else {
System.err.println("node of type: " + el.getClass());
}
}
return values;
}
private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) {
MetadatumDTO dcValue = new MetadatumDTO();
if (field == null) {
return null;
}
try {
dcValue.setValue(dateFormatTo.format(dateFormatFrom.parse(value)));
} catch (ParseException e) {
dcValue.setValue(value);
}
dcValue.setElement(field.getElement());
dcValue.setQualifier(field.getQualifier());
dcValue.setSchema(field.getSchema());
return dcValue;
}
}
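A sketch of the date conversion, with assumed formats (the actual formats, field and query are set via the Spring configuration):

SimpleXpathDateFormatMetadataContributor contributor = new SimpleXpathDateFormatMetadataContributor();
contributor.setDateFormatFrom("yyyyMMdd");  // assumed format of the source XML
contributor.setDateFormatTo("yyyy-MM-dd"); // desired metadata format
// "20220610" -> "2022-06-10"; unparsable values are kept verbatim (see the ParseException branch)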

View File

@@ -33,10 +33,10 @@ import org.springframework.beans.factory.annotation.Autowired;
 * @author Roeland Dillen (roeland at atmire dot com)
 */
public class SimpleXpathMetadatumContributor implements MetadataContributor<Element> {
protected MetadataFieldConfig field;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger();
/**
 * Return prefixToNamespaceMapping
@@ -47,7 +47,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
return prefixToNamespaceMapping;
}
protected MetadataFieldMapping<Element, MetadataContributor<Element>> metadataFieldMapping;
/**
 * Return metadataFieldMapping
@@ -79,7 +79,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
this.prefixToNamespaceMapping = prefixToNamespaceMapping;
}
protected Map<String, String> prefixToNamespaceMapping;
/**
 * Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig
@@ -103,7 +103,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
}
protected String query;
/**
 * Return the MetadataFieldConfig used while retrieving MetadatumDTO
@@ -154,10 +154,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor<Elem
for (String ns : prefixToNamespaceMapping.keySet()) {
namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns));
}
XPathExpression<Object> xpath =
    XPathFactory.instance().compile(query, Filters.fpassthrough(), null, namespaces);
List<Object> nodes = xpath.evaluate(t);
for (Object el : nodes) {
if (el instanceof Element) {

View File

@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scielo.service;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
 * An implementation of {@link AbstractMetadataFieldMapping},
 * responsible for defining the mapping of the Scielo metadatum fields onto the DSpace metadatum fields
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it)
*/
@SuppressWarnings("rawtypes")
public class ScieloFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieve metadata and
* metadata that will be set to the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "scieloMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}

View File

@@ -0,0 +1,263 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.scielo.service;
import java.io.BufferedReader;
import java.io.StringReader;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.el.MethodNotFoundException;
import javax.ws.rs.BadRequestException;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.http.client.utils.URIBuilder;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.FileSourceException;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying Scielo
*
* @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it)
*/
public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<Map<String,List<String>>>
implements QuerySource {
/**
* This pattern is used when reading the Scielo response,
 * to check whether the field being read is in RIS format
*/
private static final String PATTERN = "^([A-Z][A-Z0-9]) - (.*)$";
/**
 * This pattern is used to verify the correct format of a Scielo ID
*/
private static final String ID_PATTERN = "^(.....)-(.*)-(...)$";
private int timeout = 1000;
private String url;
@Autowired
private LiveImportClient liveImportClient;
@Override
public void init() throws Exception {}
@Override
public String getImportSource() {
return "scielo";
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query, count, start));
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
return retry(new SearchByQueryCallable(query));
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
List<ImportRecord> records = retry(new SearchByQueryCallable(query));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
List<ImportRecord> records = retry(new FindByIdCallable(id));
return CollectionUtils.isEmpty(records) ? null : records.get(0);
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new SearchNBByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scielo");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scielo");
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for Scielo");
}
/**
 * This class is a Callable implementation to count the number of entries for a Scielo query
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchNBByQueryCallable implements Callable<Integer> {
private String query;
private SearchNBByQueryCallable(String queryString) {
this.query = queryString;
}
private SearchNBByQueryCallable(Query query) {
this.query = query.getParameterAsClass("query", String.class);
}
@Override
public Integer call() throws Exception {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8));
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
Map<Integer, Map<String, List<String>>> records = getRecords(resp);
return Objects.nonNull(records) ? records.size() : 0;
}
}
/**
 * This class is a Callable implementation to get a Scielo entry using a ScieloID.
* The ScieloID to use can be passed through the constructor as a String
* or as Query's map entry, with the key "id".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class FindByIdCallable implements Callable<List<ImportRecord>> {
private String id;
private FindByIdCallable(String id) {
this.id = id;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String scieloId = id.trim();
Pattern risPattern = Pattern.compile(ID_PATTERN);
Matcher risMatcher = risPattern.matcher(scieloId);
if (risMatcher.matches()) {
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8));
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
Map<Integer, Map<String, List<String>>> records = getRecords(resp);
if (Objects.nonNull(records) && !records.isEmpty()) {
results.add(transformSourceRecords(records.get(1)));
}
} else {
throw new BadRequestException("id provided: " + scieloId + " is not a ScieloID");
}
return results;
}
}
/**
 * This class is a Callable implementation to get Scielo entries based on a query object.
 * This Callable uses as query value the queryString passed to the constructor.
 * If the object is constructed through a Query instance, the Query's map entry with key "query" will be used.
 * Pagination is supported too, using the value of the Query's map with keys "start" and "count".
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class SearchByQueryCallable implements Callable<List<ImportRecord>> {
private Query query;
private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("start", start);
query.addParameter("count", maxResult);
}
private SearchByQueryCallable(Query query) {
this.query = query;
}
@Override
public List<ImportRecord> call() throws Exception {
List<ImportRecord> results = new ArrayList<>();
String q = query.getParameterAsClass("query", String.class);
Integer count = query.getParameterAsClass("count", Integer.class);
Integer start = query.getParameterAsClass("start", Integer.class);
URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8));
uriBuilder.addParameter("start", start.toString());
uriBuilder.addParameter("count", count.toString());
Map<String, Map<String, String>> params = new HashMap<String, Map<String,String>>();
String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params);
Map<Integer, Map<String, List<String>>> records = getRecords(resp);
for (int record : records.keySet()) {
results.add(transformSourceRecords(records.get(record)));
}
return results;
}
}
private Map<Integer, Map<String,List<String>>> getRecords(String resp) throws FileSourceException {
Map<Integer, Map<String, List<String>>> records = new HashMap<Integer, Map<String,List<String>>>();
BufferedReader reader;
int countRecord = 0;
try {
reader = new BufferedReader(new StringReader(resp));
String line;
while ((line = reader.readLine()) != null) {
if (line.matches("^\\s*$")) { // skip empty or whitespace-only lines
continue;
}
line = line.replaceAll("\\uFEFF", "").trim();
Pattern risPattern = Pattern.compile(PATTERN);
Matcher risMatcher = risPattern.matcher(line);
if (risMatcher.matches()) {
if (risMatcher.group(1).equals("TY") & risMatcher.group(2).equals("JOUR")) {
countRecord ++;
Map<String,List<String>> newMap = new HashMap<String, List<String>>();
records.put(countRecord, newMap);
} else {
Map<String, List<String>> tag2values = records.get(countRecord);
List<String> values = tag2values.get(risMatcher.group(1));
if (Objects.isNull(values)) {
List<String> newValues = new ArrayList<String>();
newValues.add(risMatcher.group(2));
tag2values.put(risMatcher.group(1), newValues);
} else {
values.add(risMatcher.group(2));
tag2values.put(risMatcher.group(1), values);
}
}
}
}
} catch (Exception e) {
throw new FileSourceException("Cannot parse RIS file", e);
}
return records;
}
public void setUrl(String url) {
this.url = url;
}
}
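A sketch of the RIS shape getRecords(String) expects (a hypothetical two-record response); note that PATTERN assumes a single space around the dash, and a new record starts at each "TY - JOUR" line:

String resp =
    "TY - JOUR\n" +
    "TI - First title\n" +
    "TY - JOUR\n" +
    "TI - Second title\n";
// getRecords(resp) parses this into two maps keyed 1 and 2, each holding {"TI": ["..."]}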

View File

@@ -0,0 +1,47 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.service;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility class that provides methods to check if a given string is a DOI
*
* @author Corrado Lombardi (corrado.lombardi at 4science.it)
*/
public class DoiCheck {
private static final List<String> DOI_PREFIXES = Arrays.asList("http://dx.doi.org/", "https://dx.doi.org/");
private static final Pattern PATTERN = Pattern.compile("10.\\d{4,9}/[-._;()/:A-Z0-9]+" +
"|10.1002/[^\\s]+" +
"|10.\\d{4}/\\d+-\\d+X?(\\d+)" +
"\\d+<[\\d\\w]+:[\\d\\w]*>\\d+.\\d+.\\w+;\\d" +
"|10.1021/\\w\\w\\d++" +
"|10.1207/[\\w\\d]+\\&\\d+_\\d+",
Pattern.CASE_INSENSITIVE);
private DoiCheck() {}
public static boolean isDoi(final String value) {
Matcher m = PATTERN.matcher(purgeDoiValue(value));
return m.matches();
}
public static String purgeDoiValue(final String query) {
String value = query.replaceAll(",", "");
for (final String prefix : DOI_PREFIXES) {
value = value.replaceAll(prefix, "");
}
return value.trim();
}
}
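A usage sketch; the outcome follows directly from the prefix list and pattern above:

DoiCheck.isDoi("https://dx.doi.org/10.1000/ABC-123"); // true: the prefix is stripped, then the pattern matches
DoiCheck.isDoi("not-a-doi"); // false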

View File

@@ -0,0 +1,339 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.vufind;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Callable;
import javax.el.MethodNotFoundException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.utils.URIBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Item;
import org.dspace.importer.external.datamodel.ImportRecord;
import org.dspace.importer.external.datamodel.Query;
import org.dspace.importer.external.exception.MetadataSourceException;
import org.dspace.importer.external.liveimportclient.service.LiveImportClient;
import org.dspace.importer.external.service.AbstractImportMetadataSourceService;
import org.dspace.importer.external.service.components.QuerySource;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implements a data source for querying VuFind
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/
public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService<String>
implements QuerySource {
private final static Logger log = LogManager.getLogger();
private String url;
private String urlSearch;
private String fields;
@Autowired
private LiveImportClient liveImportClient;
public VuFindImportMetadataSourceServiceImpl(String fields) {
this.fields = fields;
}
@Override
public String getImportSource() {
return "VuFind";
}
@Override
public ImportRecord getRecord(String id) throws MetadataSourceException {
String records = retry(new GetByVuFindIdCallable(id, fields));
List<ImportRecord> importRecords = extractMetadataFromRecordList(records);
return importRecords != null && !importRecords.isEmpty() ? importRecords.get(0) : null;
}
@Override
public int getRecordsCount(String query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public int getRecordsCount(Query query) throws MetadataSourceException {
return retry(new CountByQueryCallable(query));
}
@Override
public Collection<ImportRecord> getRecords(String query, int start, int count) throws MetadataSourceException {
String records = retry(new SearchByQueryCallable(query, count, start, fields));
return extractMetadataFromRecordList(records);
}
@Override
public Collection<ImportRecord> getRecords(Query query) throws MetadataSourceException {
String records = retry(new SearchByQueryCallable(query, fields));
return extractMetadataFromRecordList(records);
}
@Override
public ImportRecord getRecord(Query query) throws MetadataSourceException {
String records = retry(new SearchByQueryCallable(query, fields));
List<ImportRecord> importRecords = extractMetadataFromRecordList(records);
return importRecords != null && !importRecords.isEmpty() ? importRecords.get(0) : null;
}
@Override
public Collection<ImportRecord> findMatchingRecords(Query query) throws MetadataSourceException {
String records = retry(new FindMatchingRecordsCallable(query));
return extractMetadataFromRecordList(records);
}
@Override
public Collection<ImportRecord> findMatchingRecords(Item item) throws MetadataSourceException {
throw new MethodNotFoundException("This method is not implemented for VuFind");
}
@Override
public void init() throws Exception {}
/**
 * This class is a Callable implementation to count the number of entries for a VuFind query.
 * This Callable uses as query value the queryString passed to the constructor.
 * If the object is constructed through a Query instance, the value of the Query's
 * map with the key "query" will be used.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
private class CountByQueryCallable implements Callable<Integer> {
private Query query;
public CountByQueryCallable(String queryString) {
query = new Query();
query.addParameter("query", queryString);
}
public CountByQueryCallable(Query query) {
this.query = query;
}
@Override
public Integer call() throws Exception {
Integer start = 0;
Integer count = 1;
int page = start / count + 1;
URIBuilder uriBuilder = new URIBuilder(urlSearch);
uriBuilder.addParameter("type", "AllField");
uriBuilder.addParameter("page", String.valueOf(page));
uriBuilder.addParameter("limit", count.toString());
uriBuilder.addParameter("prettyPrint", String.valueOf(true));
uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class));
Map<String, Map<String, String>> params = new HashMap<>();
String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
JsonNode node = convertStringJsonToJsonNode(responseString);
JsonNode resultCountNode = node.get("resultCount");
return resultCountNode.intValue();
}
}
/**
 * This class is a Callable implementation to get a VuFind entry using its VuFind id.
 * The id to use can be passed through the constructor as a String or as a Query's map entry, with the key "id".
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
 */
private class GetByVuFindIdCallable implements Callable<String> {
private String id;
private String fields;
public GetByVuFindIdCallable(String id, String fields) {
this.id = id;
if (fields != null && fields.length() > 0) {
this.fields = fields;
} else {
this.fields = null;
}
}
@Override
public String call() throws Exception {
URIBuilder uriBuilder = new URIBuilder(url);
uriBuilder.addParameter("id", id);
uriBuilder.addParameter("prettyPrint", "false");
if (StringUtils.isNotBlank(fields)) {
for (String field : fields.split(",")) {
uriBuilder.addParameter("field[]", field);
}
}
Map<String, Map<String, String>> params = new HashMap<>();
String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
return response;
}
}
/**
 * This class is a Callable implementation to get VuFind entries based on a query object.
 * This Callable uses as query value the string queryString passed to the constructor.
 * If the object is constructed from a Query instance, the Query's map entry with the key "query" is used.
 * Pagination is supported too, using the values of the Query's map entries with the keys "start" and "count".
 *
 * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
 */
private class SearchByQueryCallable implements Callable<String> {
private Query query;
private String fields;
public SearchByQueryCallable(String queryString, Integer maxResult, Integer start, String fields) {
query = new Query();
query.addParameter("query", queryString);
query.addParameter("count", maxResult);
query.addParameter("start", start);
if (StringUtils.isNotBlank(fields)) {
this.fields = fields;
} else {
this.fields = null;
}
}
public SearchByQueryCallable(Query query, String fields) {
this.query = query;
if (StringUtils.isNotBlank(fields)) {
this.fields = fields;
} else {
this.fields = null;
}
}
@Override
public String call() throws Exception {
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
int page = count != 0 ? start / count : 0;
URIBuilder uriBuilder = new URIBuilder(urlSearch);
uriBuilder.addParameter("type", "AllField");
// the search API pages are 1-based (start = 0, count = 20 -> page = 0, so request page + 1 = 1)
uriBuilder.addParameter("page", String.valueOf(page + 1));
uriBuilder.addParameter("limit", count.toString());
uriBuilder.addParameter("prettyPrint", String.valueOf(true));
uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class));
if (StringUtils.isNotBlank(fields)) {
for (String field : fields.split(",")) {
uriBuilder.addParameter("field[]", field);
}
}
Map<String, Map<String, String>> params = new HashMap<>();
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
}
/**
* This class is a Callable implementation to search VuFind entries using author and title.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class FindMatchingRecordsCallable implements Callable<String> {
private Query query;
private String fields;
public FindMatchingRecordsCallable(Query query) {
this.query = query;
}
@Override
public String call() throws Exception {
String author = query.getParameterAsClass("author", String.class);
String title = query.getParameterAsClass("title", String.class);
Integer start = query.getParameterAsClass("start", Integer.class);
Integer count = query.getParameterAsClass("count", Integer.class);
int page = count != 0 ? start / count : 0;
URIBuilder uriBuilder = new URIBuilder(url);
uriBuilder.addParameter("type", "AllField");
// pagination is 1-based (first page: start = 0, count = 20 -> page = 0 -> page + 1 = 1)
uriBuilder.addParameter("page", String.valueOf(page + 1));
uriBuilder.addParameter("limit", count.toString());
uriBuilder.addParameter("prettyPrint", "true");
if (fields != null && !fields.isEmpty()) {
for (String field : fields.split(",")) {
uriBuilder.addParameter("field[]", field);
}
}
String filter = StringUtils.EMPTY;
if (StringUtils.isNotBlank(author)) {
filter = "author:" + author;
}
if (StringUtils.isNotBlank(title)) {
if (StringUtils.isNotBlank(filter)) {
filter = filter + " AND title:" + title;
} else {
filter = "title:" + title;
}
}
uriBuilder.addParameter("lookfor", filter);
Map<String, Map<String, String>> params = new HashMap<>();
return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params);
}
}
private JsonNode convertStringJsonToJsonNode(String json) {
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
return body;
}
private List<ImportRecord> extractMetadataFromRecordList(String records) {
List<ImportRecord> recordsResult = new ArrayList<>();
JsonNode jsonNode = convertStringJsonToJsonNode(records);
JsonNode node = Objects.nonNull(jsonNode) ? jsonNode.get("records") : null;
if (Objects.nonNull(node) && node.isArray()) {
Iterator<JsonNode> nodes = node.iterator();
while (nodes.hasNext()) {
recordsResult.add(transformSourceRecords(nodes.next().toString()));
}
}
return recordsResult;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getUrlSearch() {
return urlSearch;
}
public void setUrlSearch(String urlSearch) {
this.urlSearch = urlSearch;
}
}
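A minimal usage sketch for the service above, assuming Spring has injected liveImportClient and the metadata field mapping; in DSpace the instance normally comes from the "vufindImportService" bean defined later in this changeset (the query string and page size here are placeholders):
// Hypothetical caller, not part of the changeset.
public Collection<ImportRecord> searchVuFind(VuFindImportMetadataSourceServiceImpl vufind)
        throws MetadataSourceException {
    int total = vufind.getRecordsCount("dspace");  // CountByQueryCallable reads "resultCount"
    return vufind.getRecords("dspace", 0, 20);     // SearchByQueryCallable fetches the first 20 matches
}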

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.importer.external.vufind.metadatamapping;
import java.util.Map;
import javax.annotation.Resource;
import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping;
/**
* An implementation of {@link AbstractMetadataFieldMapping}
* Responsible for defining the mapping of the VuFind metadatum fields on the DSpace metadatum fields
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
@SuppressWarnings("rawtypes")
public class VuFindFieldMapping extends AbstractMetadataFieldMapping {
/**
* Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it
* only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over
* what metadatafield is generated.
*
* @param metadataFieldMap The map containing the link between retrieved metadata and metadata that will be set to
* the item.
*/
@Override
@SuppressWarnings("unchecked")
@Resource(name = "vufindMetadataFieldMap")
public void setMetadataFieldMap(Map metadataFieldMap) {
super.setMetadataFieldMap(metadataFieldMap);
}
}
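For illustration, one entry of the "vufindMetadataFieldMap" injected above might look like the sketch below. The contributor class and its (jsonPath, field) constructor are assumptions drawn from the importer framework, not confirmed by this changeset; in practice the map is defined in Spring XML:
// Hypothetical sketch only; SimpleJsonPathMetadataContributor and its constructor are assumptions.
private Map<String, MetadataContributor<String>> buildVufindMetadataFieldMap() {
    Map<String, MetadataContributor<String>> map = new HashMap<>();
    MetadataFieldConfig title = new MetadataFieldConfig("dc", "title", null);
    // The key must be unique but only matters for post-processing;
    // the contributor decides which metadata field is generated.
    map.put("dc.title", new SimpleJsonPathMetadataContributor("$.title", title));
    return map;
}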

View File

@@ -0,0 +1,57 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import com.maxmind.geoip2.DatabaseReader;
import org.apache.commons.lang3.StringUtils;
import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Service that handles the GeoIP database file.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class GeoIpService {
@Autowired
private ConfigurationService configurationService;
/**
* Returns an instance of {@link DatabaseReader} based on the configured db
* file, if any.
*
* @return the Database reader
* @throws IllegalStateException if the db file is not configured correctly
*/
public DatabaseReader getDatabaseReader() throws IllegalStateException {
String dbPath = configurationService.getProperty("usage-statistics.dbfile");
if (StringUtils.isBlank(dbPath)) {
throw new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!");
}
try {
File dbFile = new File(dbPath);
return new DatabaseReader.Builder(dbFile).build();
} catch (FileNotFoundException fe) {
throw new IllegalStateException(
"The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " +
"based reports! Please see the DSpace installation instructions for instructions to install " +
"this file.",fe);
} catch (IOException e) {
throw new IllegalStateException(
"Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " +
"DSpace installation instructions for more details.", e);
}
}
}
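A short usage sketch, assuming the service is injected and the usual MaxMind imports (InetAddress, CityResponse, GeoIp2Exception) are present; the IP literal is a placeholder:
// Hypothetical caller, not part of the changeset.
public String resolveCountry(GeoIpService geoIpService) {
    try {
        DatabaseReader reader = geoIpService.getDatabaseReader();
        CityResponse city = reader.city(InetAddress.getByName("93.184.216.34")); // placeholder address
        return city.getCountry().getIsoCode();
    } catch (IllegalStateException e) {
        // db file missing or unreadable: location-based statistics are skipped
        return null;
    } catch (IOException | GeoIp2Exception e) {
        return null; // lookup failed for this address
    }
}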

View File

@@ -8,7 +8,6 @@
package org.dspace.statistics;
import java.io.File;
-import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
@@ -142,6 +141,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
private ClientInfoService clientInfoService;
@Autowired
private SolrStatisticsCore solrStatisticsCore;
@Autowired
private GeoIpService geoIpService;
/** URL to the current-year statistics core. Prior-year shards will have a year suffixed. */
private String statisticsCoreURL;
@@ -179,26 +180,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
//spiderIps = SpiderDetector.getSpiderIpAddresses();
DatabaseReader service = null;
-// Get the db file for the location
-String dbPath = configurationService.getProperty("usage-statistics.dbfile");
-if (dbPath != null) {
-    try {
-        File dbFile = new File(dbPath);
-        service = new DatabaseReader.Builder(dbFile).build();
-    } catch (FileNotFoundException fe) {
-        log.error(
-            "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " +
-            "based reports! Please see the DSpace installation instructions for instructions to install " +
-            "this file.",
-            fe);
-    } catch (IOException e) {
-        log.error(
-            "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " +
-            "DSpace installation instructions for more details.",
-            e);
-    }
-} else {
-    log.error("The required 'dbfile' configuration is missing in solr-statistics.cfg!");
-}
try {
    service = geoIpService.getDatabaseReader();
} catch (IllegalStateException ex) {
    log.error(ex);
}
locationService = service;
}

View File

@@ -406,6 +406,12 @@ public class DatabaseUtils {
DatabaseMetaData meta = connection.getMetaData();
String dbType = getDbType(connection);
System.out.println("\nDatabase Type: " + dbType);
if (dbType.equals(DBMS_ORACLE)) {
System.out.println("====================================");
System.out.println("WARNING: Oracle support is deprecated!");
System.out.println("See https://github.com/DSpace/DSpace/issues/8214");
System.out.println("=====================================");
}
System.out.println("Database URL: " + meta.getURL());
System.out.println("Database Schema: " + getSchemaName(connection));
System.out.println("Database Username: " + meta.getUserName());
@@ -539,6 +545,10 @@ public class DatabaseUtils {
String dbType = getDbType(connection);
connection.close();
if (dbType.equals(DBMS_ORACLE)) {
log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214");
}
// Determine location(s) where Flyway will load all DB migrations
ArrayList<String> scriptLocations = new ArrayList<>();

View File

@@ -119,3 +119,4 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused
workflow group {1}. Delete the tasks and group first if you want to remove this user.
org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in
org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided
org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks

View File

@@ -1,5 +1,10 @@
# Oracle Flyway Database Migrations (i.e. Upgrades)
---
WARNING: Oracle Support is deprecated.
See https://github.com/DSpace/DSpace/issues/8214
---
The SQL scripts in this directory are Oracle-specific database migrations. They are
used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/).
As such, these scripts are automatically called by Flyway when the DSpace

View File

@@ -115,6 +115,46 @@
</property>
</bean>
<bean id="CrossRefImportService" class="org.dspace.importer.external.crossref.CrossRefImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="CrossRefMetadataFieldMapping"/>
<property name="url" value="${crossref.url}"/>
</bean>
<bean id="CrossRefMetadataFieldMapping" class="org.dspace.importer.external.crossref.CrossRefFieldMapping"/>
<bean id="EpoImportService" class="org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="epoMetadataFieldMapping"/>
<property name="consumerKey" value="${epo.consumerKey}" />
<property name="consumerSecret" value="${epo.consumerSecretKey}" />
<property name="url" value="${epo.url}" />
<property name="authUrl" value="${epo.authUrl}" />
<property name="searchUrl" value="${epo.searchUrl}" />
<!-- date filed mapped metadata, see epo-integration.xml -->
<property name="dateFiled" ref="epo.dc.date.filed" />
<property name="applicationNumber" ref="epo.dc.identifier" />
</bean>
<bean id="epoMetadataFieldMapping" class="org.dspace.importer.external.epo.service.EpoFieldMapping"/>
<bean id="vufindImportService" class="org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl" scope="singleton">
<!-- Set to empty to use the default set of fields -->
<constructor-arg type="java.lang.String" value=""/>
<property name="metadataFieldMapping" ref="vufindMetadataFieldMapping"/>
<property name="url" value="${vufind.url}"/>
<property name="urlSearch" value="${vufind.url.search}"/>
</bean>
<bean id="vufindMetadataFieldMapping" class="org.dspace.importer.external.vufind.metadatamapping.VuFindFieldMapping"/>
<bean id="ScieloImportService" class="org.dspace.importer.external.scielo.service.ScieloImportMetadataSourceServiceImpl" scope="singleton">
<property name="metadataFieldMapping" ref="scieloMetadataFieldMapping"/>
<property name="url" value="${scielo.url}"/>
</bean>
<bean id="scieloMetadataFieldMapping" class="org.dspace.importer.external.scielo.service.ScieloFieldMapping"/>
<bean id="ADSImportService" class="org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl" scope="singleton">
<property name="apiKey" value="${ads.key}" />
<property name="url" value="${ads.url}" />
<property name="resultFieldList" value="${ads.resultFieldList}" />
<property name="metadataFieldMapping" ref="ADSMetadataFieldMapping"/>
</bean>
<bean id="ADSMetadataFieldMapping" class="org.dspace.importer.external.ads.ADSFieldMapping"/>
<!-- Metadatafield used to check against if it's already imported or not during the JSONLookupSearcher-->
<bean id="lookupID" class="org.dspace.importer.external.metadatamapping.MetadataFieldConfig">

View File

@@ -1,45 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at
http://www.dspace.org/license/
-->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context-2.5.xsd"
default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"
id="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService">
<property name="issnItemExtractors">
<list>
<bean class="org.dspace.app.sherpa.submit.MetadataValueISSNExtractor">
<property name="metadataList">
<list>
<value>dc.identifier.issn</value>
</list>
</property>
</bean>
<!-- Use the follow if you have the SHERPARoMEOJournalTitle enabled
<bean class="org.dspace.app.sherpa.submit.MetadataAuthorityISSNExtractor">
<property name="metadataList">
<list>
<value>dc.title.alternative</value>
</list>
</property>
</bean> -->
</list>
</property>
</bean>
</beans>

View File

@@ -25,10 +25,38 @@
<property name="timeout" value="5000"/>
</bean>
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitService"
      id="org.dspace.app.sherpa.submit.SHERPASubmitService">
    <property name="sherpaService" ref="org.dspace.app.sherpa.SHERPAService"/>
    <property name="configuration" ref="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"/>
</bean>
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"
id="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService">
<property name="issnItemExtractors">
<list>
<bean class="org.dspace.app.sherpa.submit.MetadataValueISSNExtractor">
<property name="metadataList">
<list>
<value>dc.identifier.issn</value>
</list>
</property>
</bean>
<!-- Use the follow if you have the SHERPARoMEOJournalTitle enabled
<bean class="org.dspace.app.sherpa.submit.MetadataAuthorityISSNExtractor">
<property name="metadataList">
<list>
<value>dc.title.alternative</value>
</list>
</property>
</bean> -->
</list>
</property>
</bean>
<bean class="org.dspace.app.sherpa.cache.SherpaCacheEvictService">
<property name="sherpaSubmitService"
ref="org.dspace.app.sherpa.submit.SHERPASubmitService"/>
<property name="cacheManager" ref="cacheManager" />
</bean>
</beans>

View File

@@ -21,6 +21,7 @@
<name-map collection-handle="123456789/language-test-1" submission-name="languagetestprocess"/>
<name-map collection-handle="123456789/extraction-test" submission-name="extractiontestprocess"/>
<name-map collection-handle="123456789/qualdrop-test" submission-name="qualdroptest"/>
<name-map collection-handle="123456789/typebind-test" submission-name="typebindtest"/>
<name-map collection-handle="123456789/accessCondition-not-discoverable" submission-name="accessConditionNotDiscoverable"/>
</submission-map>
@@ -82,6 +83,11 @@
<type>submission-form</type>
</step-definition>
<step-definition id="typebindtest">
<processing-class>org.dspace.app.rest.submit.step.DescribeStep</processing-class>
<type>submission-form</type>
</step-definition>
<step-definition id="defaultAC">
    <heading>submit.progressbar.accessCondition</heading>
    <processing-class>org.dspace.app.rest.submit.step.AccessConditionStep</processing-class>
@@ -131,6 +137,12 @@
<processing-class>org.dspace.app.rest.submit.step.DescribeStep</processing-class>
<type>submission-form</type>
</step-definition>
<step-definition id="sherpaPolicies" mandatory="true">
<heading>submit.progressbar.sherpapolicy</heading>
<processing-class>org.dspace.app.rest.submit.step.SherpaPolicyStep</processing-class>
<type>sherpaPolicy</type>
</step-definition>
</step-definitions>
<!-- The submission-definitions map lays out the detailed definition of -->
@@ -166,6 +178,7 @@
<!-- <step id="upload-with-embargo"/> -->
<!-- <step id="extractionstep"/> -->
<step id="defaultAC"/>
<step id="sherpaPolicies"/>
<!--Step will be to Sign off on the License -->
<step id="license"/>
@@ -191,6 +204,10 @@
<step id="qualdroptest" />
</submission-process>
<submission-process name="typebindtest">
<step id="typebindtest" />
</submission-process>
<submission-process name="accessConditionNotDiscoverable">
    <step id="collection"/>
    <step id="traditionalpageone"/>

View File

@@ -144,3 +144,14 @@ authentication-ip.Student = 6.6.6.6
useProxies = true
proxies.trusted.ipranges = 7.7.7.7
proxies.trusted.include_ui_ip = true
# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN
management.health.solrOai.enabled = false
researcher-profile.entity-type = Person
# Configuration settings required for Researcher Profiles
# These settings ensure the "dspace.object.owner" field is indexed by Authority Control
choices.plugin.dspace.object.owner = EPersonAuthority
choices.presentation.dspace.object.owner = suggest
authority.controlled.dspace.object.owner = true

View File

@@ -6,6 +6,8 @@
<bean class="org.dspace.external.service.impl.ExternalDataServiceImpl"/>
<bean class="org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl"/>
<bean class="org.dspace.external.provider.impl.MockDataProvider" init-method="init">
    <property name="sourceIdentifier" value="mock"/>
</bean>
@@ -13,13 +15,7 @@
<!-- SHERPA data providers set up to use mock SHERPA service -->
<bean class="org.dspace.external.provider.impl.SHERPAv2JournalISSNDataProvider" init-method="init">
    <property name="sourceIdentifier" value="sherpaJournalIssn"/>
-   <property name="sherpaService">
-       <bean class="org.dspace.app.sherpa.MockSHERPAService">
-           <property name="maxNumberOfTries" value="3"/>
-           <property name="sleepBetweenTimeouts" value="2000"/>
-           <property name="timeout" value="5000"/>
-       </bean>
-   </property>
    <property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService" />
    <property name="supportedEntityTypes">
        <list>
            <value>Journal</value>
@@ -28,13 +24,7 @@
</bean>
<bean class="org.dspace.external.provider.impl.SHERPAv2JournalDataProvider" init-method="init">
    <property name="sourceIdentifier" value="sherpaJournal"/>
-   <property name="sherpaService">
-       <bean class="org.dspace.app.sherpa.MockSHERPAService">
-           <property name="maxNumberOfTries" value="3"/>
-           <property name="sleepBetweenTimeouts" value="2000"/>
-           <property name="timeout" value="5000"/>
-       </bean>
-   </property>
    <property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService" />
    <property name="supportedEntityTypes">
        <list>
            <value>Journal</value>
@@ -43,13 +33,7 @@
</bean>
<bean class="org.dspace.external.provider.impl.SHERPAv2PublisherDataProvider" init-method="init">
    <property name="sourceIdentifier" value="sherpaPublisher"/>
-   <property name="sherpaService">
-       <bean class="org.dspace.app.sherpa.MockSHERPAService">
-           <property name="maxNumberOfTries" value="3"/>
-           <property name="sleepBetweenTimeouts" value="2000"/>
-           <property name="timeout" value="5000"/>
-       </bean>
-   </property>
    <property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService" />
    <property name="supportedEntityTypes">
        <list>
            <value>OrgUnit</value>

View File

@@ -33,4 +33,18 @@
</property>
</bean>
<!-- during test we need the mock sherpa service everywhere -->
<alias name="org.dspace.app.sherpa.SHERPAService" alias="org.dspace.app.sherpa.MockSHERPAService"/>
<bean class="org.dspace.app.sherpa.MockSHERPAService" id="org.dspace.app.sherpa.SHERPAService">
<property name="maxNumberOfTries" value="3"/>
<property name="sleepBetweenTimeouts" value="2000"/>
<property name="timeout" value="5000"/>
</bean>
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitService"
id="org.dspace.app.sherpa.submit.SHERPASubmitService">
<property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService"/>
<property name="configuration" ref="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"/>
</bean>
</beans>
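The alias plus bean-id override above makes every injection of "org.dspace.app.sherpa.SHERPAService" resolve to the mock during tests. A rough Java-config equivalent, as a sketch only (setter names inferred from the XML property names, which Spring requires for that injection to work):
// Hypothetical Java configuration mirroring the XML above; not part of this changeset.
@Configuration
public class MockSherpaTestConfig {
    @Bean(name = {"org.dspace.app.sherpa.SHERPAService", "org.dspace.app.sherpa.MockSHERPAService"})
    public SHERPAService sherpaService() {
        MockSHERPAService mock = new MockSHERPAService();
        mock.setMaxNumberOfTries(3);
        mock.setSleepBetweenTimeouts(2000);
        mock.setTimeout(5000);
        return mock;
    }
}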

View File

@@ -47,5 +47,7 @@
<bean id="org.dspace.statistics.SolrStatisticsCore"
      class="org.dspace.statistics.MockSolrStatisticsCore"
      autowire-candidate="true"/>
<bean class="org.dspace.statistics.GeoIpService" autowire-candidate="true"/>
</beans>

View File

@@ -1,37 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at
http://www.dspace.org/license/
-->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context
http://www.springframework.org/schema/context/spring-context.xsd"
default-autowire-candidates="*Service,*DAO,javax.sql.DataSource">
<context:annotation-config/> <!-- allows us to use spring annotations in beans -->
<!-- during test we need the mock sherpa service everywhere -->
<alias name="org.dspace.app.sherpa.SHERPAService" alias="org.dspace.app.sherpa.MockSHERPAService"/>
<bean class="org.dspace.app.sherpa.MockSHERPAService" id="org.dspace.app.sherpa.SHERPAService">
<property name="maxNumberOfTries" value="3"/>
<property name="sleepBetweenTimeouts" value="2000"/>
<property name="timeout" value="5000"/>
</bean>
<bean class="org.dspace.app.sherpa.submit.SHERPASubmitService"
id="org.dspace.app.sherpa.submit.SHERPASubmitService">
<property name="sherpaService" ref="org.dspace.app.sherpa.MockSHERPAService"/>
<property name="configuration" ref="org.dspace.app.sherpa.submit.SHERPASubmitConfigurationService"/>
</bean>
</beans>

View File

@@ -140,6 +140,7 @@
<dc-qualifier>ispartofseries</dc-qualifier>
<repeatable>true</repeatable>
<label>Series/Report No.</label>
<type-bind>Technical Report</type-bind>
<input-type>series</input-type>
<hint>Enter the series and number assigned to this item by your community.</hint>
<required></required>
@@ -302,6 +303,75 @@ it, please enter the types and the actual numbers or codes.</hint>
</row>
</form>
<form name="typebindtest">
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>title</dc-element>
<dc-qualifier></dc-qualifier>
<repeatable>false</repeatable>
<label>Title</label>
<input-type>onebox</input-type>
<hint>Enter the main title of the item.</hint>
<required>You must enter a main title for this item.</required>
<!-- <language value-pairs-name="common_iso_languages">true</language> -->
</field>
</row>
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>date</dc-element>
<dc-qualifier>issued</dc-qualifier>
<repeatable>false</repeatable>
<label>Date of Issue</label>
<style>col-sm-4</style>
<input-type>date</input-type>
<hint>Please give the date of previous publication or public distribution.
You can leave out the day and/or month if they aren't
applicable.</hint>
<required>You must enter at least the year.</required>
</field>
</row>
<row>
<field>
<dc-schema>dc</dc-schema>
<dc-element>type</dc-element>
<dc-qualifier></dc-qualifier>
<repeatable>true</repeatable>
<label>Type</label>
<input-type value-pairs-name="common_types">dropdown</input-type>
<hint>Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key.</hint>
<required></required>
</field>
</row>
<row>
<!-- ISBN bound to type Book and required -->
<field>
<dc-schema>dc</dc-schema>
<dc-element>identifier</dc-element>
<dc-qualifier>isbn</dc-qualifier>
<repeatable>true</repeatable>
<label>ISBN</label>
<type-bind>Book</type-bind>
<input-type>onebox</input-type>
<hint>Enter the ISBN of the book.</hint>
<required>An ISBN is required.</required>
</field>
<!-- ISBN bound to type Book chapter and NOT required and NOT repeatable -->
<field>
<dc-schema>dc</dc-schema>
<dc-element>identifier</dc-element>
<dc-qualifier>isbn</dc-qualifier>
<repeatable>true</repeatable>
<label>ISBN of Book</label>
<type-bind>Book chapter</type-bind>
<input-type>onebox</input-type>
<hint>Enter the ISBN of the book in which this chapter appears.</hint>
<required></required>
</field>
</row>
</form>
<form name="languagetest">
    <row>
        <field>

View File

@@ -0,0 +1,126 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
import java.sql.SQLException;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Unit Tests for access status service
*/
public class AccessStatusServiceTest extends AbstractUnitTest {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStatusServiceTest.class);
private Collection collection;
private Community owningCommunity;
private Item item;
protected CommunityService communityService =
ContentServiceFactory.getInstance().getCommunityService();
protected CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected WorkspaceItemService workspaceItemService =
ContentServiceFactory.getInstance().getWorkspaceItemService();
protected InstallItemService installItemService =
ContentServiceFactory.getInstance().getInstallItemService();
protected AccessStatusService accessStatusService =
AccessStatusServiceFactory.getInstance().getAccessStatusService();
/**
* This method will be run before every test as per @Before. It will
* initialize resources required for the tests.
*
* Other methods can be annotated with @Before here or in subclasses
* but no execution order is guaranteed
*/
@Before
@Override
public void init() {
super.init();
try {
context.turnOffAuthorisationSystem();
owningCommunity = communityService.create(null, context);
collection = collectionService.create(context, owningCommunity);
item = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
context.restoreAuthSystemState();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
} catch (SQLException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
}
}
/**
* This method will be run after every test as per @After. It will
* clean resources initialized by the @Before methods.
*
* Other methods can be annotated with @After here or in subclasses
* but no execution order is guaranteed
*/
@After
@Override
public void destroy() {
context.turnOffAuthorisationSystem();
try {
itemService.delete(context, item);
} catch (Exception e) {
// ignore
}
try {
collectionService.delete(context, collection);
} catch (Exception e) {
// ignore
}
try {
communityService.delete(context, owningCommunity);
} catch (Exception e) {
// ignore
}
context.restoreAuthSystemState();
item = null;
collection = null;
owningCommunity = null;
try {
super.destroy();
} catch (Exception e) {
// ignore
}
}
@Test
public void testGetAccessStatus() throws Exception {
String status = accessStatusService.getAccessStatus(context, item);
assertNotEquals("testGetAccessStatus 0", status, DefaultAccessStatusHelper.UNKNOWN);
}
}

View File

@@ -0,0 +1,423 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.joda.time.LocalDate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
private static final Logger log = LogManager.getLogger(DefaultAccessStatusHelperTest.class);
private Collection collection;
private Community owningCommunity;
private Item itemWithoutBundle;
private Item itemWithoutBitstream;
private Item itemWithBitstream;
private Item itemWithEmbargo;
private Item itemWithDateRestriction;
private Item itemWithGroupRestriction;
private Item itemWithoutPolicy;
private Item itemWithoutPrimaryBitstream;
private Item itemWithPrimaryAndMultipleBitstreams;
private Item itemWithoutPrimaryAndMultipleBitstreams;
private DefaultAccessStatusHelper helper;
private Date threshold;
protected CommunityService communityService =
ContentServiceFactory.getInstance().getCommunityService();
protected CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected WorkspaceItemService workspaceItemService =
ContentServiceFactory.getInstance().getWorkspaceItemService();
protected InstallItemService installItemService =
ContentServiceFactory.getInstance().getInstallItemService();
protected BundleService bundleService =
ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService =
ContentServiceFactory.getInstance().getBitstreamService();
protected ResourcePolicyService resourcePolicyService =
AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected GroupService groupService =
EPersonServiceFactory.getInstance().getGroupService();
/**
* This method will be run before every test as per @Before. It will
* initialize resources required for the tests.
*
* Other methods can be annotated with @Before here or in subclasses
* but no execution order is guaranteed
*/
@Before
@Override
public void init() {
super.init();
try {
context.turnOffAuthorisationSystem();
owningCommunity = communityService.create(null, context);
collection = collectionService.create(context, owningCommunity);
itemWithoutBundle = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithEmbargo = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithDateRestriction = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithGroupRestriction = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPolicy = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPrimaryBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithPrimaryAndMultipleBitstreams = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPrimaryAndMultipleBitstreams = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
context.restoreAuthSystemState();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
} catch (SQLException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
}
helper = new DefaultAccessStatusHelper();
threshold = new LocalDate(10000, 1, 1).toDate();
}
/**
* This method will be run after every test as per @After. It will
* clean resources initialized by the @Before methods.
*
* Other methods can be annotated with @After here or in subclasses
* but no execution order is guaranteed
*/
@After
@Override
public void destroy() {
context.turnOffAuthorisationSystem();
try {
itemService.delete(context, itemWithoutBundle);
itemService.delete(context, itemWithoutBitstream);
itemService.delete(context, itemWithBitstream);
itemService.delete(context, itemWithEmbargo);
itemService.delete(context, itemWithDateRestriction);
itemService.delete(context, itemWithGroupRestriction);
itemService.delete(context, itemWithoutPolicy);
itemService.delete(context, itemWithoutPrimaryBitstream);
itemService.delete(context, itemWithPrimaryAndMultipleBitstreams);
itemService.delete(context, itemWithoutPrimaryAndMultipleBitstreams);
} catch (Exception e) {
// ignore
}
try {
collectionService.delete(context, collection);
} catch (Exception e) {
// ignore
}
try {
communityService.delete(context, owningCommunity);
} catch (Exception e) {
// ignore
}
context.restoreAuthSystemState();
itemWithoutBundle = null;
itemWithoutBitstream = null;
itemWithBitstream = null;
itemWithEmbargo = null;
itemWithDateRestriction = null;
itemWithGroupRestriction = null;
itemWithoutPolicy = null;
itemWithoutPrimaryBitstream = null;
itemWithPrimaryAndMultipleBitstreams = null;
itemWithoutPrimaryAndMultipleBitstreams = null;
collection = null;
owningCommunity = null;
helper = null;
threshold = null;
communityService = null;
collectionService = null;
itemService = null;
workspaceItemService = null;
installItemService = null;
bundleService = null;
bitstreamService = null;
resourcePolicyService = null;
groupService = null;
try {
super.destroy();
} catch (Exception e) {
// ignore
}
}
/**
* Test for a null item
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithNullItem() throws Exception {
String status = helper.getAccessStatusFromItem(context, null, threshold);
assertThat("testWithNullItem 0", status, equalTo(DefaultAccessStatusHelper.UNKNOWN));
}
/**
* Test for an item with no bundle
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutBundle() throws Exception {
String status = helper.getAccessStatusFromItem(context, itemWithoutBundle, threshold);
assertThat("testWithoutBundle 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY));
}
/**
* Test for an item with no bitstream
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutBitstream() throws Exception {
context.turnOffAuthorisationSystem();
bundleService.create(context, itemWithoutBitstream, Constants.CONTENT_BUNDLE_NAME);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutBitstream, threshold);
assertThat("testWithoutBitstream 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY));
}
/**
* Test for an item with a basic bitstream (open access)
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithBitstream() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithBitstream, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithBitstream, threshold);
assertThat("testWithBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
/**
* Test for an item with an embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithEmbargo() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithEmbargo, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold);
assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
}
/**
* Test for an item with an anonymous date restriction
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithDateRestriction() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithDateRestriction, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Restriction");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(10000, 1, 1).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithDateRestriction, threshold);
assertThat("testWithDateRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with a group restriction
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithGroupRestriction() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithGroupRestriction, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Restriction");
Group group = groupService.findByName(context, Group.ADMIN);
policy.setGroup(group);
policy.setAction(Constants.READ);
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithGroupRestriction, threshold);
assertThat("testWithGroupRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with no policy
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutPolicy() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPolicy, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
authorizeService.removeAllPolicies(context, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPolicy, threshold);
assertThat("testWithoutPolicy 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with no primary bitstream
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutPrimaryBitstream() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPrimaryBitstream, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "first");
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryBitstream, threshold);
assertThat("testWithoutPrimaryBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
/**
* Test for an item with an open access bitstream
* and another primary bitstream on embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithPrimaryAndMultipleBitstreams() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithPrimaryAndMultipleBitstreams,
Constants.CONTENT_BUNDLE_NAME);
bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
Bitstream primaryBitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bundle.setPrimaryBitstreamID(primaryBitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, primaryBitstream);
authorizeService.addPolicies(context, policies, primaryBitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
}
/**
* Test for an item with an open access bitstream
* and another bitstream on embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithNoPrimaryAndMultipleBitstreams() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPrimaryAndMultipleBitstreams,
Constants.CONTENT_BUNDLE_NAME);
bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
Bitstream anotherBitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, anotherBitstream);
authorizeService.addPolicies(context, policies, anotherBitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
}

View File

@@ -11,6 +11,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Objects;
import org.dspace.app.sherpa.v2.SHERPAPublisherResponse;
import org.dspace.app.sherpa.v2.SHERPAResponse;
@@ -25,20 +26,6 @@ import org.dspace.app.sherpa.v2.SHERPAResponse;
*/
public class MockSHERPAService extends SHERPAService {
-/**
- * Simple overridden 'searchByJournalISSN' so that we do attempt to build the URI but rather than make
- * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our
- * test resources.
- * If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be
- * returned.
- * @param query ISSN string to pass in an "issn equals" API query
- * @return SHERPAResponse
- */
-@Override
-public SHERPAResponse searchByJournalISSN(String query) {
-    return performRequest("publication", "issn", "equals", query, 0, 1);
-}
/**
 * Simple overridden performRequest so that we do attempt to build the URI but rather than make
 * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our
@@ -67,8 +54,12 @@ public class MockSHERPAService extends SHERPAService {
return new SHERPAResponse("Error building URI");
}
-// Get mock JSON - in this case, a known good result for The Lancet
-content = getClass().getResourceAsStream("thelancet.json");
// Get mock JSON
// if a file with the name contained in the value does not exist, returns thelancet.json
content = getContent(value.concat(".json"));
if (Objects.isNull(content)) {
content = getContent("thelancet.json");
}
// Parse JSON input stream and return response for later evaluation
return new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON);
@@ -88,6 +79,10 @@ public class MockSHERPAService extends SHERPAService {
} }
} }
private InputStream getContent(String fileName) {
return getClass().getResourceAsStream(fileName);
}
/** /**
* Simple overridden performPublisherRequest so that we do attempt to build the URI but rather than make * Simple overridden performPublisherRequest so that we do attempt to build the URI but rather than make
* an actual HTTP call, return parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our * an actual HTTP call, return parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our
@@ -133,4 +128,5 @@ public class MockSHERPAService extends SHERPAService {
return new SHERPAPublisherResponse(e.getMessage()); return new SHERPAPublisherResponse(e.getMessage());
} }
} }
} }
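
The change above makes the mock resolve a JSON fixture named after the query value, falling back to thelancet.json when no such fixture exists. The same lookup-with-fallback pattern in isolation (resource names are illustrative):

import java.io.InputStream;
import java.util.Objects;

public class ResourceFallbackSketch {
    static InputStream getContent(String fileName) {
        return ResourceFallbackSketch.class.getResourceAsStream(fileName);
    }

    // e.g. an ISSN value "2731-0582" resolves to "2731-0582.json" when that
    // fixture exists next to this class; otherwise the default fixture is used
    static InputStream mockJsonFor(String value) {
        InputStream content = getContent(value.concat(".json"));
        if (Objects.isNull(content)) {
            content = getContent("thelancet.json");
        }
        return content;
    }
}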

View File

@@ -11,7 +11,6 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.List;
import org.dspace.AbstractUnitTest; import org.dspace.AbstractUnitTest;
import org.dspace.app.sherpa.v2.SHERPAResponse; import org.dspace.app.sherpa.v2.SHERPAResponse;
@@ -109,20 +108,18 @@ public class SHERPASubmitServiceTest extends AbstractUnitTest {
// Get responses from SHERPA submit service, which should inspect item ISSNs and perform search // Get responses from SHERPA submit service, which should inspect item ISSNs and perform search
// on the mock SHERPA service // on the mock SHERPA service
List<SHERPAResponse> responses = sherpaSubmitService.searchRelatedJournals(context, testItem); SHERPAResponse response = sherpaSubmitService.searchRelatedJournals(context, testItem);
// Make sure response is not null or empty // Make sure response is not null or empty
assertTrue("Response list should not be null or empty", assertTrue("Response should not be null", response != null);
responses != null && !responses.isEmpty());
// For each response (there should be only one based on test data) perform the standard set
// of thorough parsing tests
for (SHERPAResponse response : responses) {
// Assert response is not error, or fail with message // Assert response is not error, or fail with message
assertFalse("Response was flagged as 'isError'", response.isError()); assertFalse("Response was flagged as 'isError'", response.isError());
// Skip remainder of parsing tests - these are already done in SHERPAServiceTest // Skip remainder of parsing tests - these are already done in SHERPAServiceTest
}
} }
} }

View File

@@ -0,0 +1,88 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.util.ArrayList;
import java.util.List;
import org.dspace.AbstractUnitTest;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Tests for parsing and utilities on submission config forms / readers
*
* @author Kim Shepherd
*/
public class SubmissionConfigTest extends AbstractUnitTest {
DCInputsReader inputReader;
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() throws DCInputsReaderException {
inputReader = new DCInputsReader();
}
@After
public void tearDown() {
inputReader = null;
}
@Test
public void testReadAndProcessTypeBindSubmissionConfig()
throws SubmissionConfigReaderException, DCInputsReaderException {
// Set up test data. This should match the typebind test submission / form config
String typeBindHandle = "123456789/typebind-test";
String typeBindSubmissionName = "typebindtest";
String typeBindSubmissionStepName = "typebindtest";
// Expected field lists from typebindtest form
List<String> allConfiguredFields = new ArrayList<>();
allConfiguredFields.add("dc.title");
allConfiguredFields.add("dc.date.issued");
allConfiguredFields.add("dc.type");
allConfiguredFields.add("dc.identifier.isbn");
List<String> unboundFields = allConfiguredFields.subList(0, 3);
// Get submission configuration
SubmissionConfig submissionConfig =
new SubmissionConfigReader().getSubmissionConfigByCollection(typeBindHandle);
// Submission name should match name defined in item-submission.xml
assertEquals(typeBindSubmissionName, submissionConfig.getSubmissionName());
// Step 0 - our process only has one step. It should not be null and should have the ID typebindtest
SubmissionStepConfig submissionStepConfig = submissionConfig.getStep(0);
assertNotNull(submissionStepConfig);
assertEquals(typeBindSubmissionStepName, submissionStepConfig.getId());
// Get inputs and allowed fields
DCInputSet inputConfig = inputReader.getInputsByFormName(submissionStepConfig.getId());
List<String> allowedFieldsForBook = inputConfig.populateAllowedFieldNames("Book");
List<String> allowedFieldsForBookChapter = inputConfig.populateAllowedFieldNames("Book chapter");
List<String> allowedFieldsForArticle = inputConfig.populateAllowedFieldNames("Article");
List<String> allowedFieldsForNoType = inputConfig.populateAllowedFieldNames(null);
// Book and book chapter should be allowed all 4 configured fields (dc.identifier.isbn is bound to both types)
assertEquals(allConfiguredFields, allowedFieldsForBook);
assertEquals(allConfiguredFields, allowedFieldsForBookChapter);
// Article and null type should match the subset of fields without the ISBN field
assertEquals(unboundFields, allowedFieldsForArticle);
assertEquals(unboundFields, allowedFieldsForNoType);
}
}
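
For readers unfamiliar with type-bind: the test above expects dc.identifier.isbn to be offered only for the document types it is bound to. A self-contained sketch of that filtering, assuming a hypothetical field-to-types map (the real logic lives in DCInputSet.populateAllowedFieldNames):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class TypeBindSketch {
    // Hypothetical map of field -> document types it is bound to (absent = unbound).
    static final Map<String, List<String>> TYPE_BIND = Map.of(
            "dc.identifier.isbn", List.of("Book", "Book chapter"));

    static List<String> allowedFieldNames(List<String> allFields, String docType) {
        List<String> allowed = new ArrayList<>();
        for (String field : allFields) {
            List<String> boundTypes = TYPE_BIND.getOrDefault(field, List.of());
            // unbound fields are always allowed; bound fields only for matching types
            if (boundTypes.isEmpty() || (docType != null && boundTypes.contains(docType))) {
                allowed.add(field);
            }
        }
        return allowed;
    }

    public static void main(String[] args) {
        List<String> all = List.of("dc.title", "dc.date.issued", "dc.type", "dc.identifier.isbn");
        System.out.println(allowedFieldNames(all, "Book"));    // all four fields
        System.out.println(allowedFieldNames(all, "Article")); // without the ISBN field
        System.out.println(allowedFieldNames(all, null));      // without the ISBN field
    }
}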

View File

@@ -8,6 +8,8 @@
package org.dspace.builder; package org.dspace.builder;
import static org.dspace.content.LicenseUtils.getLicenseText; import static org.dspace.content.LicenseUtils.getLicenseText;
import static org.dspace.content.MetadataSchemaEnum.DC;
import static org.dspace.content.authority.Choices.CF_ACCEPTED;
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
@@ -76,6 +78,11 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
public ItemBuilder withAuthor(final String authorName) { public ItemBuilder withAuthor(final String authorName) {
return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName); return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName);
} }
public ItemBuilder withAuthor(final String authorName, final String authority) {
return addMetadataValue(item, DC.getName(), "contributor", "author", null, authorName, authority, 600);
}
public ItemBuilder withAuthor(final String authorName, final String authority, final int confidence) { public ItemBuilder withAuthor(final String authorName, final String authority, final int confidence) {
return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author",
null, authorName, authority, confidence); null, authorName, authority, confidence);
@@ -147,6 +154,10 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
return addMetadataValue(item, schema, element, qualifier, value); return addMetadataValue(item, schema, element, qualifier, value);
} }
public ItemBuilder withDspaceObjectOwner(String value, String authority) {
return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED);
}
public ItemBuilder makeUnDiscoverable() { public ItemBuilder makeUnDiscoverable() {
item.setDiscoverable(false); item.setDiscoverable(false);
return this; return this;
@@ -175,7 +186,7 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
/** /**
* Create an admin group for the collection with the specified members * Create an admin group for the collection with the specified members
* *
* @param members epersons to add to the admin group * @param ePerson eperson to add to the admin group
* @return this builder * @return this builder
* @throws SQLException * @throws SQLException
* @throws AuthorizeException * @throws AuthorizeException
@@ -184,6 +195,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder<Item> {
return setAdminPermission(item, ePerson, null); return setAdminPermission(item, ePerson, null);
} }
public ItemBuilder withPersonEmail(String email) {
return addMetadataValue(item, "person", "email", null, email);
}
@Override @Override
public Item build() { public Item build() {

View File

@@ -177,14 +177,18 @@ public class WorkspaceItemBuilder extends AbstractBuilder<WorkspaceItem, Workspa
return addMetadataValue(MetadataSchemaEnum.DC.getName(), "subject", null, subject); return addMetadataValue(MetadataSchemaEnum.DC.getName(), "subject", null, subject);
} }
public WorkspaceItemBuilder withAbstract(final String subject) { public WorkspaceItemBuilder withIssn(String issn) {
return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject); return addMetadataValue(MetadataSchemaEnum.DC.getName(), "identifier", "issn", issn);
} }
public WorkspaceItemBuilder withEntityType(final String entityType) { public WorkspaceItemBuilder withEntityType(final String entityType) {
return addMetadataValue("dspace", "entity", "type", entityType); return addMetadataValue("dspace", "entity", "type", entityType);
} }
public WorkspaceItemBuilder withAbstract(final String subject) {
return addMetadataValue(MetadataSchemaEnum.DC.getName(),"description", "abstract", subject);
}
public WorkspaceItemBuilder grantLicense() { public WorkspaceItemBuilder grantLicense() {
Item item = workspaceItem.getItem(); Item item = workspaceItem.getItem();
String license; String license;

View File

@@ -8,6 +8,7 @@
package org.dspace.core; package org.dspace.core;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
@@ -511,9 +512,8 @@ public class ContextTest extends AbstractUnitTest {
// Now get our special groups // Now get our special groups
List<Group> specialGroups = instance.getSpecialGroups(); List<Group> specialGroups = instance.getSpecialGroups();
assertThat("testGetSpecialGroup 0", specialGroups.size(), equalTo(2)); assertThat("testGetSpecialGroup size", specialGroups.size(), equalTo(2));
assertThat("testGetSpecialGroup 1", specialGroups.get(0), equalTo(group)); assertThat("testGetSpecialGroup content", specialGroups, hasItems(group, adminGroup));
assertThat("testGetSpecialGroup 1", specialGroups.get(1), equalTo(adminGroup));
// Cleanup our context & group // Cleanup our context & group
groupService.delete(instance, group); groupService.delete(instance, group);
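
The rewritten assertion uses hasItems so the test no longer depends on the order in which the special groups are returned. A minimal runnable illustration of that matcher, using the same Hamcrest classes as the test:

import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.MatcherAssert.assertThat;

import java.util.List;

public class HasItemsSketch {
    public static void main(String[] args) {
        List<String> specialGroups = List.of("Administrator", "Anonymous");
        // passes regardless of the order in which the groups were registered
        assertThat(specialGroups, hasItems("Anonymous", "Administrator"));
        System.out.println("order-independent assertion passed");
    }
}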

View File

@@ -0,0 +1,65 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.util;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.text.ParseException;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.dspace.importer.external.service.DoiCheck;
import org.junit.Test;
/**
* Test class for the DoiCheck
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
public class DoiCheckTest {
@Test
public void checkDOIsTest() throws ParseException {
for (String doi : DOIsToTest()) {
assertTrue("The: " + doi + " is a doi!", DoiCheck.isDoi(doi));
}
}
@Test
public void checkWrongDOIsTest() throws ParseException {
for (String key : wrongDOIsToTest()) {
assertFalse("This : " + key + " isn't a doi!", DoiCheck.isDoi(key));
}
}
private List<String> DOIsToTest() {
return Arrays.asList(
"10.1430/8105",
"10.1038/nphys1170",
"10.1002/0470841559.ch1",
"10.1594/PANGAEA.726855",
"10.1594/GFZ.GEOFON.gfz2009kciu",
"10.3866/PKU.WHXB201112303",
"10.11467/isss2003.7.1_11",
"10.3972/water973.0145.db"
);
}
private List<String> wrongDOIsToTest() {
return Arrays.asList(
StringUtils.EMPTY,
"123456789",
"nphys1170/10.1038",
"10.", "10",
"10.1038/"
);
}
}
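
The DoiCheck implementation itself is not part of this diff. As a rough sketch of what the test data implies, a DOI here is "10." plus a registrant code, a slash, and a non-empty suffix; the pattern below is an assumption for illustration, not DSpace's actual logic.

import java.util.regex.Pattern;

public class DoiCheckSketch {
    // Assumed shape: "10." + registrant + "/" + non-empty suffix
    private static final Pattern DOI = Pattern.compile("^10\\.\\S+/\\S+$");

    static boolean isDoi(String value) {
        return value != null && DOI.matcher(value).matches();
    }

    public static void main(String[] args) {
        System.out.println(isDoi("10.1038/nphys1170")); // true
        System.out.println(isDoi("10.1038/"));          // false (empty suffix)
        System.out.println(isDoi("nphys1170/10.1038")); // false (wrong order)
    }
}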

View File

@@ -0,0 +1,3 @@
{
"items": []
}

View File

@@ -0,0 +1,504 @@
{
"items": [
{
"system_metadata": {
"id": 40863,
"uri": "https://v2.sherpa.ac.uk/id/publication/40863",
"date_modified": "2022-03-25 14:08:29",
"publicly_visible": "yes",
"publicly_visible_phrases": [
{
"language": "en",
"phrase": "Yes",
"value": "yes"
}
],
"date_created": "2022-01-11 09:43:53"
},
"tj_status_phrases": [
{
"phrase": "Plan S Approved",
"value": "plan_s_approved",
"language": "en"
}
],
"type_phrases": [
{
"value": "journal",
"phrase": "Journal",
"language": "en"
}
],
"id": 40863,
"issns": [
{
"issn": "2731-0582"
}
],
"publishers": [
{
"relationship_type": "commercial_publisher",
"relationship_type_phrases": [
{
"value": "commercial_publisher",
"phrase": "Commercial Publisher",
"language": "en"
}
],
"publisher": {
"id": 3286,
"name": [
{
"name": "Nature Research",
"language": "en",
"preferred_phrases": [
{
"language": "en",
"phrase": "Name",
"value": "name"
}
],
"preferred": "name",
"language_phrases": [
{
"phrase": "English",
"value": "en",
"language": "en"
}
]
}
],
"imprint_of_id": 62037,
"country": "gb",
"country_phrases": [
{
"value": "gb",
"phrase": "United Kingdom",
"language": "en"
}
],
"publication_count": 87,
"uri": "https://v2.sherpa.ac.uk/id/publisher/3286",
"url": "https://www.nature.com/"
}
}
],
"listed_in_doaj_phrases": [
{
"language": "en",
"phrase": "No",
"value": "no"
}
],
"listed_in_doaj": "no",
"tj_status": [
"plan_s_approved"
],
"publisher_policy": [
{
"open_access_prohibited": "no",
"id": 3286,
"publication_count": 36,
"internal_moniker": "Default Policy",
"urls": [
{
"description": "Self archiving and license to publish",
"url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish"
},
{
"description": "Preprints and Conference Proceedings",
"url": "https://www.nature.com/nature-portfolio/editorial-policies/preprints-and-conference-proceedings"
},
{
"url": "https://www.springernature.com/gp/open-research/policies/accepted-manuscript-terms",
"description": "Accepted manuscript terms of use"
}
],
"open_access_prohibited_phrases": [
{
"value": "no",
"phrase": "No",
"language": "en"
}
],
"uri": "https://v2.sherpa.ac.uk/id/publisher_policy/3286",
"permitted_oa": [
{
"prerequisites": {
"prerequisites_phrases": [
{
"language": "en",
"value": "when_research_article",
"phrase": "If a Research Article"
}
],
"prerequisites": [
"when_research_article"
]
},
"copyright_owner": "authors",
"additional_oa_fee_phrases": [
{
"language": "en",
"value": "no",
"phrase": "No"
}
],
"article_version_phrases": [
{
"language": "en",
"value": "submitted",
"phrase": "Submitted"
}
],
"additional_oa_fee": "no",
"copyright_owner_phrases": [
{
"language": "en",
"value": "authors",
"phrase": "Authors"
}
],
"article_version": [
"submitted"
],
"location": {
"location_phrases": [
{
"value": "authors_homepage",
"phrase": "Author's Homepage",
"language": "en"
},
{
"language": "en",
"phrase": "Funder Designated Location",
"value": "funder_designated_location"
},
{
"language": "en",
"value": "institutional_repository",
"phrase": "Institutional Repository"
},
{
"phrase": "Preprint Repository",
"value": "preprint_repository",
"language": "en"
}
],
"location": [
"authors_homepage",
"funder_designated_location",
"institutional_repository",
"preprint_repository"
]
},
"conditions": [
"Must link to publisher version",
"Upon publication, source must be acknowledged and DOI cited",
"Post-prints are subject to Springer Nature re-use terms",
"Non-commercial use only"
]
},
{
"embargo": {
"units": "months",
"amount": 6,
"units_phrases": [
{
"phrase": "Months",
"value": "months",
"language": "en"
}
]
},
"license": [
{
"license_phrases": [
{
"phrase": "Publisher's Bespoke License",
"value": "bespoke_license",
"language": "en"
}
],
"license": "bespoke_license"
}
],
"article_version_phrases": [
{
"value": "accepted",
"phrase": "Accepted",
"language": "en"
}
],
"additional_oa_fee": "no",
"conditions": [
"Must link to publisher version",
"Published source must be acknowledged and DOI cited",
"Post-prints are subject to Springer Nature re-use terms",
"Non-commercial use only"
],
"copyright_owner_phrases": [
{
"phrase": "Authors",
"value": "authors",
"language": "en"
}
],
"location": {
"location": [
"authors_homepage",
"funder_designated_location",
"institutional_repository",
"named_repository"
],
"location_phrases": [
{
"phrase": "Author's Homepage",
"value": "authors_homepage",
"language": "en"
},
{
"phrase": "Funder Designated Location",
"value": "funder_designated_location",
"language": "en"
},
{
"language": "en",
"value": "institutional_repository",
"phrase": "Institutional Repository"
},
{
"language": "en",
"value": "named_repository",
"phrase": "Named Repository"
}
],
"named_repository": [
"PubMed Central",
"Europe PMC"
]
},
"article_version": [
"accepted"
],
"prerequisites": {
"prerequisites": [
"when_research_article"
],
"prerequisites_phrases": [
{
"value": "when_research_article",
"phrase": "If a Research Article",
"language": "en"
}
]
},
"copyright_owner": "authors",
"additional_oa_fee_phrases": [
{
"language": "en",
"value": "no",
"phrase": "No"
}
]
}
]
},
{
"id": 4410,
"open_access_prohibited": "no",
"urls": [
{
"url": "https://www.springernature.com/gp/open-research/about/the-fundamentals-of-open-access-and-open-research",
"description": "The fundamentals of open access and open research"
},
{
"url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish",
"description": "Self archiving and license to publish"
},
{
"url": "https://www.springernature.com/gp/open-research/policies/journal-policies",
"description": "Open access policies for journals"
}
],
"open_access_prohibited_phrases": [
{
"language": "en",
"phrase": "No",
"value": "no"
}
],
"internal_moniker": "Open Access",
"publication_count": 34,
"permitted_oa": [
{
"additional_oa_fee_phrases": [
{
"language": "en",
"phrase": "Yes",
"value": "yes"
}
],
"copyright_owner": "authors",
"conditions": [
"Published source must be acknowledged with citation"
],
"article_version": [
"published"
],
"copyright_owner_phrases": [
{
"language": "en",
"value": "authors",
"phrase": "Authors"
}
],
"location": {
"location_phrases": [
{
"phrase": "Any Website",
"value": "any_website",
"language": "en"
},
{
"language": "en",
"phrase": "Journal Website",
"value": "this_journal"
}
],
"location": [
"any_website",
"this_journal"
]
},
"additional_oa_fee": "yes",
"article_version_phrases": [
{
"phrase": "Published",
"value": "published",
"language": "en"
}
],
"license": [
{
"license_phrases": [
{
"phrase": "CC BY",
"value": "cc_by",
"language": "en"
}
],
"license": "cc_by",
"version": "4.0"
}
],
"publisher_deposit": [
{
"repository_metadata": {
"type_phrases": [
{
"language": "en",
"value": "disciplinary",
"phrase": "Disciplinary"
}
],
"notes": "Launched as UK PubMed Central (UKPMC) in January 2007, changed to Europe PubMed Central in November 2012.\r\nSpecial item types include: Links",
"url": "http://europepmc.org/",
"type": "disciplinary",
"name": [
{
"name": "Europe PMC",
"language": "en",
"preferred": "name",
"language_phrases": [
{
"value": "en",
"phrase": "English",
"language": "en"
}
],
"preferred_phrases": [
{
"language": "en",
"phrase": "Name",
"value": "name"
}
]
}
]
},
"system_metadata": {
"id": 908,
"uri": "https://v2.sherpa.ac.uk/id/repository/908"
}
},
{
"system_metadata": {
"id": 267,
"uri": "https://v2.sherpa.ac.uk/id/repository/267"
},
"repository_metadata": {
"type_phrases": [
{
"language": "en",
"phrase": "Disciplinary",
"value": "disciplinary"
}
],
"type": "disciplinary",
"url": "http://www.ncbi.nlm.nih.gov/pmc/",
"name": [
{
"language": "en",
"name": "PubMed Central",
"preferred": "name",
"language_phrases": [
{
"language": "en",
"value": "en",
"phrase": "English"
}
],
"preferred_phrases": [
{
"language": "en",
"value": "name",
"phrase": "Name"
}
]
}
]
}
}
]
}
],
"uri": "https://v2.sherpa.ac.uk/id/publisher_policy/4410"
}
],
"title": [
{
"preferred_phrases": [
{
"language": "en",
"phrase": "Title",
"value": "name"
}
],
"language_phrases": [
{
"language": "en",
"value": "en",
"phrase": "English"
}
],
"preferred": "name",
"title": "Nature Synthesis",
"language": "en"
}
],
"type": "journal",
"url": "https://www.nature.com/natsynth/"
}
]
}

View File

@@ -57,6 +57,13 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-rest</artifactId> <artifactId>spring-boot-starter-data-rest</artifactId>
<version>${spring-boot.version}</version> <version>${spring-boot.version}</version>
<exclusions>
<!-- Later version brought in by spring-boot-starter-web above -->
<exclusion>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jdk8</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
@@ -73,7 +80,6 @@
<dependency> <dependency>
<groupId>javax.cache</groupId> <groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId> <artifactId>cache-api</artifactId>
<version>1.1.0</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/org.ehcache/ehcache --> <!-- https://mvnrepository.com/artifact/org.ehcache/ehcache -->
<dependency> <dependency>

View File

@@ -72,7 +72,12 @@ public class DSpaceOAIDataProvider {
private DSpaceResumptionTokenFormatter resumptionTokenFormat = new DSpaceResumptionTokenFormatter(); private DSpaceResumptionTokenFormatter resumptionTokenFormat = new DSpaceResumptionTokenFormatter();
@RequestMapping({"", "/"}) @RequestMapping("")
public void index(HttpServletResponse response, HttpServletRequest request) throws IOException {
response.sendRedirect(request.getRequestURI() + "/");
}
@RequestMapping({"/"})
public String indexAction(HttpServletResponse response, Model model) throws ServletException { public String indexAction(HttpServletResponse response, Model model) throws ServletException {
try { try {
XOAIManager manager = xoaiManagerResolver.getManager(); XOAIManager manager = xoaiManagerResolver.getManager();

View File

@@ -19,7 +19,6 @@
<properties> <properties>
<!-- This is the path to the root [dspace-src] directory. --> <!-- This is the path to the root [dspace-src] directory. -->
<root.basedir>${basedir}/..</root.basedir> <root.basedir>${basedir}/..</root.basedir>
<spring-security.version>5.3.10.RELEASE</spring-security.version>
</properties> </properties>
<build> <build>
<plugins> <plugins>

View File

@@ -264,6 +264,13 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-rest</artifactId> <artifactId>spring-boot-starter-data-rest</artifactId>
<version>${spring-boot.version}</version> <version>${spring-boot.version}</version>
<exclusions>
<!-- Later version brought in by spring-boot-starter-web above -->
<exclusion>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jdk8</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>
@@ -271,6 +278,12 @@
<artifactId>spring-boot-starter-aop</artifactId> <artifactId>spring-boot-starter-aop</artifactId>
<version>${spring-boot.version}</version> <version>${spring-boot.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
<version>${spring-boot.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.flipkart.zjsonpatch</groupId> <groupId>com.flipkart.zjsonpatch</groupId>
@@ -457,7 +470,6 @@
<dependency> <dependency>
<groupId>javax.cache</groupId> <groupId>javax.cache</groupId>
<artifactId>cache-api</artifactId> <artifactId>cache-api</artifactId>
<version>1.1.0</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/org.ehcache/ehcache --> <!-- https://mvnrepository.com/artifact/org.ehcache/ehcache -->
<dependency> <dependency>

View File

@@ -161,6 +161,7 @@ public class Application extends SpringBootServletInitializer {
@Override @Override
public void addCorsMappings(@NonNull CorsRegistry registry) { public void addCorsMappings(@NonNull CorsRegistry registry) {
// Get allowed origins for api and iiif endpoints. // Get allowed origins for api and iiif endpoints.
// The actuator endpoints are configured using management.endpoints.web.cors.* properties
String[] corsAllowedOrigins = configuration String[] corsAllowedOrigins = configuration
.getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig());
String[] iiifAllowedOrigins = configuration String[] iiifAllowedOrigins = configuration

View File

@@ -7,8 +7,13 @@
*/ */
package org.dspace.app.rest; package org.dspace.app.rest;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo;
import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Arrays; import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
@@ -19,9 +24,11 @@ import org.dspace.app.rest.model.AuthenticationStatusRest;
import org.dspace.app.rest.model.AuthenticationTokenRest; import org.dspace.app.rest.model.AuthenticationTokenRest;
import org.dspace.app.rest.model.AuthnRest; import org.dspace.app.rest.model.AuthnRest;
import org.dspace.app.rest.model.EPersonRest; import org.dspace.app.rest.model.EPersonRest;
import org.dspace.app.rest.model.GroupRest;
import org.dspace.app.rest.model.hateoas.AuthenticationStatusResource; import org.dspace.app.rest.model.hateoas.AuthenticationStatusResource;
import org.dspace.app.rest.model.hateoas.AuthenticationTokenResource; import org.dspace.app.rest.model.hateoas.AuthenticationTokenResource;
import org.dspace.app.rest.model.hateoas.AuthnResource; import org.dspace.app.rest.model.hateoas.AuthnResource;
import org.dspace.app.rest.model.hateoas.EmbeddedPage;
import org.dspace.app.rest.model.wrapper.AuthenticationToken; import org.dspace.app.rest.model.wrapper.AuthenticationToken;
import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.security.RestAuthenticationService; import org.dspace.app.rest.security.RestAuthenticationService;
@@ -34,6 +41,10 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.web.PagedResourcesAssembler;
import org.springframework.hateoas.EntityModel;
import org.springframework.hateoas.Link; import org.springframework.hateoas.Link;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
@@ -109,6 +120,8 @@ public class AuthenticationRestController implements InitializingBean {
if (context.getCurrentUser() != null) { if (context.getCurrentUser() != null) {
ePersonRest = converter.toRest(context.getCurrentUser(), projection); ePersonRest = converter.toRest(context.getCurrentUser(), projection);
} }
List<GroupRest> groupList = context.getSpecialGroups().stream()
.map(g -> (GroupRest) converter.toRest(g, projection)).collect(Collectors.toList());
AuthenticationStatusRest authenticationStatusRest = new AuthenticationStatusRest(ePersonRest); AuthenticationStatusRest authenticationStatusRest = new AuthenticationStatusRest(ePersonRest);
// When not authenticated add WWW-Authenticate so client can retrieve all available authentication methods // When not authenticated add WWW-Authenticate so client can retrieve all available authentication methods
@@ -120,11 +133,41 @@ public class AuthenticationRestController implements InitializingBean {
} }
authenticationStatusRest.setAuthenticationMethod(context.getAuthenticationMethod()); authenticationStatusRest.setAuthenticationMethod(context.getAuthenticationMethod());
authenticationStatusRest.setProjection(projection); authenticationStatusRest.setProjection(projection);
authenticationStatusRest.setSpecialGroups(groupList);
AuthenticationStatusResource authenticationStatusResource = converter.toResource(authenticationStatusRest); AuthenticationStatusResource authenticationStatusResource = converter.toResource(authenticationStatusRest);
return authenticationStatusResource; return authenticationStatusResource;
} }
/**
* Checks the current user's authentication status (i.e. whether they are authenticated or not) and,
* if authenticated, retrieves the current context's special groups.
* @param page the pagination information
* @param assembler the assembler used to build the paged resources
* @param request the current request
* @param response the current response
* @return an EntityModel wrapping an embedded page of the current context's special groups
* @throws SQLException
*/
@RequestMapping(value = "/status/specialGroups", method = RequestMethod.GET)
public EntityModel retrieveSpecialGroups(Pageable page, PagedResourcesAssembler assembler,
HttpServletRequest request, HttpServletResponse response)
throws SQLException {
Context context = ContextUtil.obtainContext(request);
Projection projection = utils.obtainProjection();
List<GroupRest> groupList = context.getSpecialGroups().stream()
.map(g -> (GroupRest) converter.toRest(g, projection)).collect(Collectors.toList());
Page<GroupRest> groupPage = (Page<GroupRest>) utils.getPage(groupList, page);
Link link = linkTo(
methodOn(AuthenticationRestController.class).retrieveSpecialGroups(page, assembler, request, response))
.withSelfRel();
return EntityModel.of(new EmbeddedPage(link.getHref(),
groupPage.map(converter::toResource), null, "specialGroups"));
}
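
A sketch of how a client might call the new endpoint, assuming the REST layer is served under /server and this controller is mapped at /api/authn (base URL and token are placeholders):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SpecialGroupsClientSketch {
    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(
                URI.create("http://localhost:8080/server/api/authn/status/specialGroups?size=5"))
                .header("Authorization", "Bearer <jwt>") // token obtained from a prior login
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // JSON containing _embedded.specialGroups
    }
}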
/** /**
* Check whether the login has succeeded or not. The actual login is performed by one of the enabled login filters * Check whether the login has succeeded or not. The actual login is performed by one of the enabled login filters
* (e.g. {@link org.dspace.app.rest.security.StatelessLoginFilter}). * (e.g. {@link org.dspace.app.rest.security.StatelessLoginFilter}).

View File

@@ -153,8 +153,9 @@ public class BitstreamRestController {
} }
org.dspace.app.rest.utils.BitstreamResource bitstreamResource = org.dspace.app.rest.utils.BitstreamResource bitstreamResource =
new org.dspace.app.rest.utils.BitstreamResource( new org.dspace.app.rest.utils.BitstreamResource(name, uuid,
name, uuid, currentUser != null ? currentUser.getID() : null, citationEnabledForBitstream); currentUser != null ? currentUser.getID() : null,
context.getSpecialGroupUuids(), citationEnabledForBitstream);
//We have all the data we need, close the connection to the database so that it doesn't stay open during //We have all the data we need, close the connection to the database so that it doesn't stay open during
//download/streaming //download/streaming

View File

@@ -23,6 +23,7 @@ import javax.xml.transform.stream.StreamResult;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.rest.utils.ScopeResolver;
import org.dspace.app.util.SyndicationFeed; import org.dspace.app.util.SyndicationFeed;
import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.factory.UtilServiceFactory;
import org.dspace.app.util.service.OpenSearchService; import org.dspace.app.util.service.OpenSearchService;
@@ -35,12 +36,17 @@ import org.dspace.core.Context;
import org.dspace.core.LogHelper; import org.dspace.core.LogHelper;
import org.dspace.core.Utils; import org.dspace.core.Utils;
import org.dspace.discovery.DiscoverQuery; import org.dspace.discovery.DiscoverQuery;
import org.dspace.discovery.DiscoverQuery.SORT_ORDER;
import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.DiscoverResult;
import org.dspace.discovery.IndexableObject; import org.dspace.discovery.IndexableObject;
import org.dspace.discovery.SearchService;
import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchServiceException;
import org.dspace.discovery.SearchUtils; import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoveryConfigurationService;
import org.dspace.discovery.configuration.DiscoverySearchFilter; import org.dspace.discovery.configuration.DiscoverySearchFilter;
import org.dspace.discovery.indexobject.IndexableItem;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller; import org.springframework.stereotype.Controller;
import org.springframework.ui.Model; import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.GetMapping;
@@ -67,8 +73,17 @@ public class OpenSearchController {
private AuthorizeService authorizeService; private AuthorizeService authorizeService;
private OpenSearchService openSearchService; private OpenSearchService openSearchService;
@Autowired
private SearchService searchService;
@Autowired
private DiscoveryConfigurationService searchConfigurationService;
private Context context; private Context context;
@Autowired
private ScopeResolver scopeResolver;
/** /**
* This method provides the OpenSearch query on the path /search * This method provides the OpenSearch query on the path /search
* It will pass the result as a OpenSearchDocument directly to the client * It will pass the result as a OpenSearchDocument directly to the client
@@ -80,6 +95,9 @@ public class OpenSearchController {
@RequestParam(name = "start", required = false) Integer start, @RequestParam(name = "start", required = false) Integer start,
@RequestParam(name = "rpp", required = false) Integer count, @RequestParam(name = "rpp", required = false) Integer count,
@RequestParam(name = "format", required = false) String format, @RequestParam(name = "format", required = false) String format,
@RequestParam(name = "sort", required = false) String sort,
@RequestParam(name = "sort_direction", required = false) String sortDirection,
@RequestParam(name = "scope", required = false) String dsoObject,
Model model) throws IOException, ServletException { Model model) throws IOException, ServletException {
context = ContextUtil.obtainContext(request); context = ContextUtil.obtainContext(request);
if (start == null) { if (start == null) {
@@ -115,9 +133,34 @@ public class OpenSearchController {
// support pagination parameters // support pagination parameters
DiscoverQuery queryArgs = new DiscoverQuery(); DiscoverQuery queryArgs = new DiscoverQuery();
queryArgs.setQuery(query); if (query == null) {
query = "";
} else {
queryArgs.setQuery(query);
}
queryArgs.setStart(start); queryArgs.setStart(start);
queryArgs.setMaxResults(count); queryArgs.setMaxResults(count);
queryArgs.setDSpaceObjectFilter(IndexableItem.TYPE);
if (sort != null) {
// a sort field was requested; apply it in the requested direction (ascending by default)
if (sortDirection != null && sortDirection.equals("DESC")) {
queryArgs.setSortField(sort + "_sort", SORT_ORDER.desc);
} else {
queryArgs.setSortField(sort + "_sort", SORT_ORDER.asc);
}
} else {
queryArgs.setSortField("dc.date.accessioned_dt", SORT_ORDER.desc);
}
if (dsoObject != null) {
container = scopeResolver.resolveScope(context, dsoObject);
DiscoveryConfiguration discoveryConfiguration = searchConfigurationService
.getDiscoveryConfigurationByNameOrDso("site", container);
queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId());
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries()
.toArray(
new String[discoveryConfiguration.getDefaultFilterQueries()
.size()]));
}
// Perform the search // Perform the search
DiscoverResult qResults = null; DiscoverResult qResults = null;
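
A sketch of a client call exercising the new sort, sort_direction and scope parameters, assuming the controller is served at /server/opensearch/search (base URL and UUID are placeholders):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class OpenSearchClientSketch {
    public static void main(String[] args) throws Exception {
        String url = "http://localhost:8080/server/opensearch/search"
                + "?query=test"
                + "&sort=dc.date.issued&sort_direction=DESC"     // applied as dc.date.issued_sort
                + "&scope=00000000-0000-0000-0000-000000000000"; // community/collection UUID
        HttpResponse<String> response = HttpClient.newHttpClient().send(
                HttpRequest.newBuilder(URI.create(url)).GET().build(),
                HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
    }
}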

View File

@@ -0,0 +1,82 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT;
import static org.dspace.core.Constants.EPERSON;
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.utils.Utils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ControllerUtils;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* This controller will handle all the incoming calls on the /api/authz/resourcepolicies/{id}/eperson endpoint
* where the id corresponds to the ResourcePolicy of which you want to replace the related EPerson.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
@RestController
@RequestMapping("/api/authz/resourcepolicies" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + "/eperson")
public class ResourcePolicyEPersonReplaceRestController {
@Autowired
private Utils utils;
@Autowired
private ResourcePolicyService resourcePolicyService;
@PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')")
@RequestMapping(method = PUT, consumes = {"text/uri-list"})
public ResponseEntity<RepresentationModel<?>> replaceEPersonOfResourcePolicy(@PathVariable Integer id,
HttpServletResponse response, HttpServletRequest request) throws SQLException, AuthorizeException {
Context context = obtainContext(request);
List<DSpaceObject> dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request));
if (dsoList.size() != 1 || dsoList.get(0).getType() != EPERSON) {
throw new UnprocessableEntityException(
"The EPerson doesn't exist or the data cannot be resolved to an EPerson.");
}
ResourcePolicy resourcePolicy = resourcePolicyService.find(context, id);
if (Objects.isNull(resourcePolicy)) {
throw new ResourceNotFoundException("ResourcePolicy with id: " + id + " not found");
}
if (Objects.isNull(resourcePolicy.getEPerson())) {
throw new UnprocessableEntityException("ResourcePolicy with id: " + id + " doesn't link to an EPerson");
}
EPerson newEPerson = (EPerson) dsoList.get(0);
resourcePolicy.setEPerson(newEPerson);
context.commit();
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
}

View File

@@ -0,0 +1,82 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.utils.ContextUtil.obtainContext;
import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT;
import static org.dspace.core.Constants.GROUP;
import static org.springframework.web.bind.annotation.RequestMethod.PUT;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.utils.Utils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ControllerUtils;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.hateoas.RepresentationModel;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* This controller will handle all the incoming calls on the /api/authz/resourcepolicies/{id}/group endpoint
* where the id corresponds to the ResourcePolicy of which you want to replace the related Group.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com)
*/
@RestController
@RequestMapping("/api/authz/resourcepolicies" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + "/group")
public class ResourcePolicyGroupReplaceRestController {
@Autowired
private Utils utils;
@Autowired
private ResourcePolicyService resourcePolicyService;
@PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')")
@RequestMapping(method = PUT, consumes = {"text/uri-list"})
public ResponseEntity<RepresentationModel<?>> replaceGroupOfResourcePolicy(@PathVariable Integer id,
HttpServletResponse response, HttpServletRequest request) throws SQLException, AuthorizeException {
Context context = obtainContext(request);
List<DSpaceObject> dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request));
if (dsoList.size() != 1 || dsoList.get(0).getType() != GROUP) {
throw new UnprocessableEntityException("The Group doesn't exist or the data cannot be resolved to a Group");
}
ResourcePolicy resourcePolicy = resourcePolicyService.find(context, id);
if (Objects.isNull(resourcePolicy)) {
throw new ResourceNotFoundException("ResourcePolicy with id: " + id + " not found!");
}
if (Objects.isNull(resourcePolicy.getGroup())) {
throw new UnprocessableEntityException("ResourcePolicy with id: " + id + " doesn't link to a Group");
}
Group newGroup = (Group) dsoList.get(0);
resourcePolicy.setGroup(newGroup);
context.commit();
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
}
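
Both controllers accept a PUT with a single URI in a text/uri-list body. A client-side sketch, assuming the standard DSpace REST paths (policy id, group UUID and base URL are placeholders):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ResourcePolicyReplaceSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:8080/server";
        // URI of the replacement Group, sent as the text/uri-list body
        String groupUri = base + "/api/eperson/groups/00000000-0000-0000-0000-000000000000";
        HttpRequest request = HttpRequest.newBuilder(
                URI.create(base + "/api/authz/resourcepolicies/42/group"))
                .header("Content-Type", "text/uri-list")
                .header("Authorization", "Bearer <admin-jwt>") // requires ADMIN on the policy
                .PUT(HttpRequest.BodyPublishers.ofString(groupUri))
                .build();
        HttpResponse<Void> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.discarding());
        System.out.println(response.statusCode()); // 204 No Content on success
    }
}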

Some files were not shown because too many files have changed in this diff.