diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 34539abc16..7f58a49f9e 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -31,6 +31,11 @@ jobs: # We turn off 'latest' tag by default. TAGS_FLAVOR: | latest=false + # Architectures / Platforms for which we will build Docker images + # If this is a PR, we ONLY build for AMD64. For PRs we only do a sanity check test to ensure Docker builds work. + # If this is NOT a PR (e.g. a tag or merge commit), also build for ARM64. NOTE: The ARM64 build takes MUCH + # longer (around 45mins or so) which is why we only run it when pushing a new Docker image. + PLATFORMS: linux/amd64${{ github.event_name != 'pull_request' && ', linux/arm64' || '' }} steps: # https://github.com/actions/checkout @@ -41,6 +46,10 @@ jobs: - name: Setup Docker Buildx uses: docker/setup-buildx-action@v1 + # https://github.com/docker/setup-qemu-action + - name: Set up QEMU emulation to build for multiple architectures + uses: docker/setup-qemu-action@v2 + # https://github.com/docker/login-action - name: Login to DockerHub # Only login if not a PR, as PRs only trigger a Docker build and not a push @@ -70,6 +79,7 @@ jobs: with: context: . file: ./Dockerfile.dependencies + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -95,6 +105,7 @@ jobs: with: context: . file: ./Dockerfile + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -123,6 +134,7 @@ jobs: with: context: . 
file: ./Dockerfile.test + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} @@ -148,9 +160,10 @@ jobs: with: context: . file: ./Dockerfile.cli + platforms: ${{ env.PLATFORMS }} # For pull requests, we run the Docker build (to ensure no PR changes break the build), # but we ONLY do an image push to DockerHub if it's NOT a PR push: ${{ github.event_name != 'pull_request' }} # Use tags / labels provided by 'docker/metadata-action' above tags: ${{ steps.meta_build_cli.outputs.tags }} - labels: ${{ steps.meta_build_cli.outputs.labels }} \ No newline at end of file + labels: ${{ steps.meta_build_cli.outputs.labels }} diff --git a/README.md b/README.md index 864a099c1d..37a46a70c9 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Documentation for each release may be viewed online or downloaded via our [Docum The latest DSpace Installation instructions are available at: https://wiki.lyrasis.org/display/DSDOC7x/Installing+DSpace -Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL or Oracle) +Please be aware that, as a Java web application, DSpace requires a database (PostgreSQL) and a servlet container (usually Tomcat) in order to function. More information about these and all other prerequisites can be found in the Installation instructions above. 
diff --git a/dspace-api/pom.xml b/dspace-api/pom.xml index fc7349b379..b4cad7853f 100644 --- a/dspace-api/pom.xml +++ b/dspace-api/pom.xml @@ -336,7 +336,6 @@ - org.apache.logging.log4j log4j-api @@ -361,6 +360,23 @@ ehcache ${ehcache.version} + + + org.springframework.boot + spring-boot-starter-cache + ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-logging + + + + + javax.cache + cache-api + org.hibernate hibernate-jpamodelgen @@ -862,6 +878,13 @@ mockserver-junit-rule 5.11.2 test + + + + org.yaml + snakeyaml + + diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java new file mode 100644 index 0000000000..1cacbf6aed --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusHelper.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Plugin interface for the access status calculation. + */ +public interface AccessStatusHelper { + /** + * Calculate the access status for the item. + * + * @param context the DSpace context + * @param item the item + * @return an access status value + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java new file mode 100644 index 0000000000..544dc99cb4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/AccessStatusServiceImpl.java @@ -0,0 +1,66 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.core.service.PluginService; +import org.dspace.services.ConfigurationService; +import org.joda.time.LocalDate; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implementation for the access status calculation service. + */ +public class AccessStatusServiceImpl implements AccessStatusService { + // Plugin implementation, set from the DSpace configuration by init(). + protected AccessStatusHelper helper = null; + + protected Date forever_date = null; + + @Autowired(required = true) + protected ConfigurationService configurationService; + + @Autowired(required = true) + protected PluginService pluginService; + + /** + * Initialize the bean (after dependency injection has already taken place). + * Ensures the configurationService is injected, so that we can get the plugin + * and the forever embargo date threshold from the configuration. + * Called by "init-method" in Spring configuration. 
+ * + * @throws Exception on generic exception + */ + public void init() throws Exception { + if (helper == null) { + helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class); + if (helper == null) { + throw new IllegalStateException("The AccessStatusHelper plugin was not defined in " + + "DSpace configuration."); + } + + // Defines the embargo forever date threshold for the access status. + // Look at EmbargoService.FOREVER for some improvements? + int year = configurationService.getIntProperty("access.status.embargo.forever.year"); + int month = configurationService.getIntProperty("access.status.embargo.forever.month"); + int day = configurationService.getIntProperty("access.status.embargo.forever.day"); + + forever_date = new LocalDate(year, month, day).toDate(); + } + } + + @Override + public String getAccessStatus(Context context, Item item) throws SQLException { + return helper.getAccessStatusFromItem(context, item, forever_date); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java new file mode 100644 index 0000000000..a67fa67af3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/DefaultAccessStatusHelper.java @@ -0,0 +1,159 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import java.sql.SQLException; +import java.util.Date; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.authorize.service.ResourcePolicyService; +import 
org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.DSpaceObject; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.dspace.eperson.Group; + +/** + * Default plugin implementation of the access status helper. + * The getAccessStatusFromItem method provides a simple logic to + * calculate the access status of an item based on the policies of + * the primary or the first bitstream in the original bundle. + * Users can override this method for enhanced functionality. + */ +public class DefaultAccessStatusHelper implements AccessStatusHelper { + public static final String EMBARGO = "embargo"; + public static final String METADATA_ONLY = "metadata.only"; + public static final String OPEN_ACCESS = "open.access"; + public static final String RESTRICTED = "restricted"; + public static final String UNKNOWN = "unknown"; + + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected AuthorizeService authorizeService = + AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + public DefaultAccessStatusHelper() { + super(); + } + + /** + * Look at the item's policies to determine an access status value. + * It also considers a date threshold for embargoes and restrictions. + * + * If the item is null, simply returns the "unknown" value. 
+ * + * @param context the DSpace context + * @param item the item to embargo + * @param threshold the embargo threshold date + * @return an access status value + */ + @Override + public String getAccessStatusFromItem(Context context, Item item, Date threshold) + throws SQLException { + if (item == null) { + return UNKNOWN; + } + // Consider only the original bundles. + List bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME); + // Check for primary bitstreams first. + Bitstream bitstream = bundles.stream() + .map(bundle -> bundle.getPrimaryBitstream()) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + if (bitstream == null) { + // If there is no primary bitstream, + // take the first bitstream in the bundles. + bitstream = bundles.stream() + .map(bundle -> bundle.getBitstreams()) + .flatMap(List::stream) + .findFirst() + .orElse(null); + } + return caculateAccessStatusForDso(context, bitstream, threshold); + } + + /** + * Look at the DSpace object's policies to determine an access status value. + * + * If the object is null, returns the "metadata.only" value. + * If any policy attached to the object is valid for the anonymous group, + * returns the "open.access" value. + * Otherwise, if the policy start date is before the embargo threshold date, + * returns the "embargo" value. + * Every other cases return the "restricted" value. + * + * @param context the DSpace context + * @param dso the DSpace object + * @param threshold the embargo threshold date + * @return an access status value + */ + private String caculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold) + throws SQLException { + if (dso == null) { + return METADATA_ONLY; + } + // Only consider read policies. + List policies = authorizeService + .getPoliciesActionFilter(context, dso, Constants.READ); + int openAccessCount = 0; + int embargoCount = 0; + int restrictedCount = 0; + int unknownCount = 0; + // Looks at all read policies. 
+ for (ResourcePolicy policy : policies) { + boolean isValid = resourcePolicyService.isDateValid(policy); + Group group = policy.getGroup(); + // The group must not be null here. However, + // if it is, consider this as an unexpected case. + if (group == null) { + unknownCount++; + } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) { + // Only calculate the status for the anonymous group. + if (isValid) { + // If the policy is valid, the anonymous group has access + // to the bitstream. + openAccessCount++; + } else { + Date startDate = policy.getStartDate(); + if (startDate != null && !startDate.before(threshold)) { + // If the policy start date has a value and if this value + // is equal to or later than the configured forever date, the + // access status is also restricted. + restrictedCount++; + } else { + // If the current date is not between the policy start date + // and end date, the access status is embargo. + embargoCount++; + } + } + } + } + if (openAccessCount > 0) { + return OPEN_ACCESS; + } + if (embargoCount > 0 && restrictedCount == 0) { + return EMBARGO; + } + if (unknownCount > 0) { + return UNKNOWN; + } + return RESTRICTED; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java new file mode 100644 index 0000000000..77d8f6b448 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactory.java @@ -0,0 +1,25 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.services.factory.DSpaceServicesFactory; + +/** + * Abstract factory to get services for the 
access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. + */ +public abstract class AccessStatusServiceFactory { + + public abstract AccessStatusService getAccessStatusService(); + + public static AccessStatusServiceFactory getInstance() { + return DSpaceServicesFactory.getInstance().getServiceManager() + .getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class); + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java new file mode 100644 index 0000000000..fe3848cb2b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/factory/AccessStatusServiceFactoryImpl.java @@ -0,0 +1,26 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.factory; + +import org.dspace.access.status.service.AccessStatusService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Factory implementation to get services for the access status package, + * use AccessStatusServiceFactory.getInstance() to retrieve an implementation. 
+ */ +public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory { + + @Autowired(required = true) + private AccessStatusService accessStatusService; + + @Override + public AccessStatusService getAccessStatusService() { + return accessStatusService; + } +} diff --git a/dspace-api/src/main/java/org/dspace/access/status/package-info.java b/dspace-api/src/main/java/org/dspace/access/status/package-info.java new file mode 100644 index 0000000000..2c0ed22cd4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/package-info.java @@ -0,0 +1,30 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +/** + *

+ * Access status allows the users to view the bitstreams availability before + * browsing into the item itself. + *

+ *

+ * The access status is calculated through a pluggable class: + * {@link org.dspace.access.status.AccessStatusHelper}. + * The {@link org.dspace.access.status.AccessStatusServiceImpl} + * must be configured to specify this class, as well as a forever embargo date + * threshold year, month and day. + *

+ *

+ * See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation + * based on the primary or the first bitstream of the original bundle. You can + * supply your own class to implement more complex access statuses. + *

+ *

+ * For now, the access status is calculated when the item is shown in a list. + *

+ */ + +package org.dspace.access.status; diff --git a/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java new file mode 100644 index 0000000000..43de5e3c47 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/access/status/service/AccessStatusService.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status.service; + +import java.sql.SQLException; + +import org.dspace.content.Item; +import org.dspace.core.Context; + +/** + * Public interface to the access status subsystem. + *

+ * Configuration properties: (with examples) + * {@code + * # values for the forever embargo date threshold + * # This threshold date is used in the default access status helper to dermine if an item is + * # restricted or embargoed based on the start date of the primary (or first) file policies. + * # In this case, if the policy start date is inferior to the threshold date, the status will + * # be embargo, else it will be restricted. + * # You might want to change this threshold based on your needs. For example: some databases + * # doesn't accept a date superior to 31 december 9999. + * access.status.embargo.forever.year = 10000 + * access.status.embargo.forever.month = 1 + * access.status.embargo.forever.day = 1 + * # implementation of access status helper plugin - replace with local implementation if applicable + * # This default access status helper provides an item status based on the policies of the primary + * # bitstream (or first bitstream in the original bundles if no primary file is specified). + * plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper + * } + */ +public interface AccessStatusService { + + /** + * Calculate the access status for an Item while considering the forever embargo date threshold. + * + * @param context the DSpace context + * @param item the item + * @throws SQLException An exception that provides information on a database access error or other errors. 
+ */ + public String getAccessStatus(Context context, Item item) throws SQLException; +} diff --git a/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java new file mode 100644 index 0000000000..8291af87fc --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/exception/ResourceAlreadyExistsException.java @@ -0,0 +1,32 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.exception; + +/** + * This class provides an exception to be used when trying to save a resource + * that already exists. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResourceAlreadyExistsException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + /** + * Create a ResourceAlreadyExistsException with a message and the already + * existing resource. 
+ * + * @param message the error message + */ + public ResourceAlreadyExistsException(String message) { + super(message); + } + + +} diff --git a/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfile.java b/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfile.java new file mode 100644 index 0000000000..584b505044 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfile.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.profile; + +import static org.dspace.core.Constants.READ; +import static org.dspace.eperson.Group.ANONYMOUS; + +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; + +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.util.UUIDUtils; +import org.springframework.util.Assert; + +/** + * Object representing a Researcher Profile. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfile { + + private final Item item; + + private final MetadataValue dspaceObjectOwner; + + /** + * Create a new ResearcherProfile object from the given item. + * + * @param item the profile item + * @throws IllegalArgumentException if the given item has not a dspace.object.owner + * metadata with a valid authority + */ + public ResearcherProfile(Item item) { + Assert.notNull(item, "A researcher profile requires an item"); + this.item = item; + this.dspaceObjectOwner = getDspaceObjectOwnerMetadata(item); + } + + public UUID getId() { + return UUIDUtils.fromString(dspaceObjectOwner.getAuthority()); + } + + /** + * A profile is considered visible if accessible by anonymous users. 
This method + * returns true if the given item has a READ policy related to ANONYMOUS group, + * false otherwise. + */ + public boolean isVisible() { + return item.getResourcePolicies().stream() + .filter(policy -> policy.getGroup() != null) + .anyMatch(policy -> READ == policy.getAction() && ANONYMOUS.equals(policy.getGroup().getName())); + } + + public Item getItem() { + return item; + } + + private MetadataValue getDspaceObjectOwnerMetadata(Item item) { + return getMetadataValue(item, "dspace.object.owner") + .filter(metadata -> UUIDUtils.fromString(metadata.getAuthority()) != null) + .orElseThrow( + () -> new IllegalArgumentException("A profile item must have a valid dspace.object.owner metadata") + ); + } + + private Optional getMetadataValue(Item item, String metadataField) { + return getMetadataValues(item, metadataField).findFirst(); + } + + private Stream getMetadataValues(Item item, String metadataField) { + return item.getMetadata().stream() + .filter(metadata -> metadataField.equals(metadata.getMetadataField().toString('.'))); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfileServiceImpl.java b/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfileServiceImpl.java new file mode 100644 index 0000000000..22977463f7 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/profile/ResearcherProfileServiceImpl.java @@ -0,0 +1,367 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.profile; + +import static java.util.Optional.empty; +import static java.util.Optional.of; +import static java.util.Optional.ofNullable; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.WRITE; +import static 
org.dspace.eperson.Group.ANONYMOUS; + +import java.io.IOException; +import java.net.URI; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; +import java.util.UUID; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.dspace.app.exception.ResourceAlreadyExistsException; +import org.dspace.app.profile.service.ResearcherProfileService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.WorkspaceItem; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Context; +import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverResult; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; +import org.dspace.discovery.SearchServiceException; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.Group; +import org.dspace.eperson.service.GroupService; +import org.dspace.services.ConfigurationService; +import org.dspace.util.UUIDUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link ResearcherProfileService}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfileServiceImpl implements ResearcherProfileService { + + private static Logger log = LoggerFactory.getLogger(ResearcherProfileServiceImpl.class); + + @Autowired + private ItemService itemService; + + @Autowired + private WorkspaceItemService workspaceItemService; + + @Autowired + private InstallItemService installItemService; + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private CollectionService collectionService; + + @Autowired + private SearchService searchService; + + @Autowired + private GroupService groupService; + + @Autowired + private AuthorizeService authorizeService; + + @Override + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return null; + } + + return new ResearcherProfile(profileItem); + } + + @Override + public ResearcherProfile createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Collection collection = findProfileCollection(context) + .orElseThrow(() -> new IllegalStateException("No collection found for researcher profiles")); + + context.turnOffAuthorisationSystem(); + try { + + Item item = createProfileItem(context, ePerson, collection); + return new ResearcherProfile(item); + + } finally { + context.restoreAuthSystemState(); + } + + } + + @Override + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException { + Assert.notNull(id, "An id must be provided to find a researcher 
profile"); + + Item profileItem = findResearcherProfileItemById(context, id); + if (profileItem == null) { + return; + } + + if (isHardDeleteEnabled()) { + deleteItem(context, profileItem); + } else { + removeOwnerMetadata(context, profileItem); + } + + } + + @Override + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException { + + if (profile.isVisible() == visible) { + return; + } + + Item item = profile.getItem(); + Group anonymous = groupService.findByName(context, ANONYMOUS); + + if (visible) { + authorizeService.addPolicy(context, item, READ, anonymous); + } else { + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + } + + @Override + public ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, SearchServiceException { + + Item profileItem = findResearcherProfileItemById(context, ePerson.getID()); + if (profileItem != null) { + throw new ResourceAlreadyExistsException("A profile is already linked to the provided User"); + } + + Item item = findItemByURI(context, uri) + .orElseThrow(() -> new IllegalArgumentException("No item found by URI " + uri)); + + if (!item.isArchived() || item.isWithdrawn()) { + throw new IllegalArgumentException( + "Only archived items can be claimed to create a researcher profile. Item ID: " + item.getID()); + } + + if (!hasProfileType(item)) { + throw new IllegalArgumentException("The provided item has not a profile type. Item ID: " + item.getID()); + } + + if (haveDifferentEmail(item, ePerson)) { + throw new IllegalArgumentException("The provided item is not claimable because it has a different email " + + "than the given user's email. 
Item ID: " + item.getID()); + } + + String existingOwner = itemService.getMetadataFirstValue(item, "dspace", "object", "owner", Item.ANY); + + if (StringUtils.isNotBlank(existingOwner)) { + throw new IllegalArgumentException("Item with provided uri has already an owner - ID: " + existingOwner); + } + + context.turnOffAuthorisationSystem(); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, + ePerson.getName(), ePerson.getID().toString(), CF_ACCEPTED); + context.restoreAuthSystemState(); + + return new ResearcherProfile(item); + } + + @Override + public boolean hasProfileType(Item item) { + String profileType = getProfileType(); + if (StringUtils.isBlank(profileType)) { + return false; + } + return profileType.equals(itemService.getEntityTypeLabel(item)); + } + + @Override + public String getProfileType() { + return configurationService.getProperty("researcher-profile.entity-type", "Person"); + } + + private Optional findItemByURI(final Context context, final URI uri) throws SQLException { + String path = uri.getPath(); + UUID uuid = UUIDUtils.fromString(path.substring(path.lastIndexOf("/") + 1)); + return ofNullable(itemService.find(context, uuid)); + } + + /** + * Search for an profile item owned by an eperson with the given id. + */ + private Item findResearcherProfileItemById(Context context, UUID id) throws SQLException, AuthorizeException { + + String profileType = getProfileType(); + + Iterator items = itemService.findByAuthorityValue(context, "dspace", "object", "owner", id.toString()); + while (items.hasNext()) { + Item item = items.next(); + String entityType = itemService.getEntityTypeLabel(item); + if (profileType.equals(entityType)) { + return item; + } + } + + return null; + } + + /** + * Returns a Profile collection based on a configuration or searching for a + * collection of researcher profile type. 
+ */ + private Optional findProfileCollection(Context context) throws SQLException, SearchServiceException { + return findConfiguredProfileCollection(context) + .or(() -> findFirstCollectionByProfileEntityType(context)); + } + + /** + * Create a new profile item for the given ePerson in the provided collection. + */ + private Item createProfileItem(Context context, EPerson ePerson, Collection collection) + throws AuthorizeException, SQLException { + + String id = ePerson.getID().toString(); + String fullName = ePerson.getFullName(); + + WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, true); + Item item = workspaceItem.getItem(); + itemService.addMetadata(context, item, "dc", "title", null, null, fullName); + itemService.addMetadata(context, item, "person", "email", null, null, ePerson.getEmail()); + itemService.addMetadata(context, item, "dspace", "object", "owner", null, fullName, id, CF_ACCEPTED); + + item = installItemService.installItem(context, workspaceItem); + + if (isNewProfileNotVisibleByDefault()) { + Group anonymous = groupService.findByName(context, ANONYMOUS); + authorizeService.removeGroupPolicies(context, item, anonymous); + } + + authorizeService.addPolicy(context, item, READ, ePerson); + authorizeService.addPolicy(context, item, WRITE, ePerson); + + return reloadItem(context, item); + } + + private Optional findConfiguredProfileCollection(Context context) throws SQLException { + UUID uuid = UUIDUtils.fromString(configurationService.getProperty("researcher-profile.collection.uuid")); + if (uuid == null) { + return Optional.empty(); + } + + Collection collection = collectionService.find(context, uuid); + if (collection == null) { + return Optional.empty(); + } + + if (isNotProfileCollection(collection)) { + log.warn("The configured researcher-profile.collection.uuid " + + "has an invalid entity type, expected " + getProfileType()); + return Optional.empty(); + } + + return of(collection); + } + + 
@SuppressWarnings("rawtypes") + private Optional findFirstCollectionByProfileEntityType(Context context) { + + String profileType = getProfileType(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + profileType); + + DiscoverResult discoverResult = search(context, discoverQuery); + List indexableObjects = discoverResult.getIndexableObjects(); + + if (CollectionUtils.isEmpty(indexableObjects)) { + return empty(); + } + + return ofNullable((Collection) indexableObjects.get(0).getIndexedObject()); + } + + private boolean isHardDeleteEnabled() { + return configurationService.getBooleanProperty("researcher-profile.hard-delete.enabled"); + } + + private boolean isNewProfileNotVisibleByDefault() { + return !configurationService.getBooleanProperty("researcher-profile.set-new-profile-visible"); + } + + private boolean isNotProfileCollection(Collection collection) { + String entityType = collectionService.getMetadataFirstValue(collection, "dspace", "entity", "type", Item.ANY); + return entityType == null || !entityType.equals(getProfileType()); + } + + private boolean haveDifferentEmail(Item item, EPerson currentUser) { + return itemService.getMetadataByMetadataString(item, "person.email").stream() + .map(MetadataValue::getValue) + .filter(StringUtils::isNotBlank) + .noneMatch(email -> email.equalsIgnoreCase(currentUser.getEmail())); + } + + private void removeOwnerMetadata(Context context, Item profileItem) throws SQLException { + List metadata = itemService.getMetadata(profileItem, "dspace", "object", "owner", Item.ANY); + itemService.removeMetadataValues(context, profileItem, metadata); + } + + private Item reloadItem(Context context, Item item) throws SQLException { + context.uncacheEntity(item); + return context.reloadEntity(item); + } + + private void deleteItem(Context context, Item profileItem) throws SQLException, AuthorizeException { + try 
{ + context.turnOffAuthorisationSystem(); + itemService.delete(context, profileItem); + } catch (IOException e) { + throw new RuntimeException(e); + } finally { + context.restoreAuthSystemState(); + } + } + + private DiscoverResult search(Context context, DiscoverQuery discoverQuery) { + try { + return searchService.search(context, discoverQuery); + } catch (SearchServiceException e) { + throw new RuntimeException(e); + } + } + +} diff --git a/dspace-api/src/main/java/org/dspace/app/profile/service/ResearcherProfileService.java b/dspace-api/src/main/java/org/dspace/app/profile/service/ResearcherProfileService.java new file mode 100644 index 0000000000..359f91761a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/profile/service/ResearcherProfileService.java @@ -0,0 +1,112 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.profile.service; + +import java.net.URI; +import java.sql.SQLException; +import java.util.UUID; + +import org.dspace.app.profile.ResearcherProfile; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; +import org.dspace.eperson.EPerson; + +/** + * Service interface class for the {@link ResearcherProfile} object. The + * implementation of this class is responsible for all business logic calls for + * the {@link ResearcherProfile} object. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface ResearcherProfileService { + + /** + * Find the ResearcherProfile by UUID. + * + * @param context the relevant DSpace Context. 
+ * @param id the ResearcherProfile id + * @return the found ResearcherProfile + * @throws SQLException + * @throws AuthorizeException + */ + public ResearcherProfile findById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Create a new researcher profile for the given ePerson. + * + * @param context the relevant DSpace Context. + * @param ePerson the ePerson + * @return the created profile + * @throws SQLException + * @throws AuthorizeException + * @throws SearchServiceException + */ + public ResearcherProfile createAndReturn(Context context, EPerson ePerson) + throws AuthorizeException, SQLException, SearchServiceException; + + /** + * Delete the profile with the given id. Based on the + * researcher-profile.hard-delete.enabled configuration, this method deletes the + * related item or removes the association between the researcher profile and + * eperson related to the input uuid. + * + * @param context the relevant DSpace Context. + * @param id the researcher profile id + * @throws AuthorizeException + * @throws SQLException + */ + public void deleteById(Context context, UUID id) throws SQLException, AuthorizeException; + + /** + * Changes the visibility of the given profile using the given new visible + * value. The visiblity controls whether the Profile is Anonymous READ or not. + * + * @param context the relevant DSpace Context. + * @param profile the researcher profile to update + * @param visible the visible value to set. If true the profile will + * be visible to all users. + * @throws SQLException + * @throws AuthorizeException + */ + public void changeVisibility(Context context, ResearcherProfile profile, boolean visible) + throws AuthorizeException, SQLException; + + /** + * Claims and links an eperson to an existing DSpaceObject + * @param context the relevant DSpace Context. 
+ * @param ePerson the ePerson + * @param uri uri of existing Item to be linked to the + * eperson + * @return the created profile + * @throws IllegalArgumentException if the given uri is not related to an + * archived item or if the item cannot be + * claimed + */ + ResearcherProfile claim(Context context, EPerson ePerson, URI uri) + throws SQLException, AuthorizeException, SearchServiceException; + + /** + * Check if the given item has an entity type compatible with that of the + * researcher profile. If the given item does not have an entity type, the check + * returns false. + * + * @param item the item to check + * @return the check result + */ + boolean hasProfileType(Item item); + + /** + * Returns the profile entity type, if any. + * + * @return the profile type + */ + String getProfileType(); +} diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java index 87198fe172..ead725e842 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/SHERPAService.java @@ -31,6 +31,7 @@ import org.dspace.app.sherpa.v2.SHERPAResponse; import org.dspace.app.sherpa.v2.SHERPAUtils; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; /** * SHERPAService is responsible for making the HTTP call to the SHERPA v2 API @@ -43,6 +44,7 @@ import org.springframework.beans.factory.annotation.Autowired; * @author Kim Shepherd */ public class SHERPAService { + private CloseableHttpClient client = null; private int maxNumberOfTries; @@ -91,6 +93,7 @@ public class SHERPAService { * @param query ISSN string to pass in an "issn equals" API query * @return SHERPAResponse containing an error or journal policies */ + @Cacheable(key = "#query", cacheNames = "sherpa.searchByJournalISSN") public SHERPAResponse 
searchByJournalISSN(String query) { return performRequest("publication", "issn", "equals", query, 0, 1); } @@ -413,4 +416,5 @@ public class SHERPAService { public void setTimeout(int timeout) { this.timeout = timeout; } -} + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java new file mode 100644 index 0000000000..94ecfb5e21 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheEvictService.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import java.util.Objects; +import java.util.Set; + +import org.dspace.app.sherpa.submit.SHERPASubmitService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.springframework.cache.CacheManager; + +/** + * This service is responsible to deal with the SherpaService cache. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SherpaCacheEvictService { + + // The cache that is managed by this service. 
+ static final String CACHE_NAME = "sherpa.searchByJournalISSN"; + + private CacheManager cacheManager; + + private SHERPASubmitService sherpaSubmitService; + + /** + * Remove immediately from the cache all the response that are related to a specific item + * extracting the ISSNs from the item + * + * @param context The DSpace context + * @param item an Item + */ + public void evictCacheValues(Context context, Item item) { + Set ISSNs = sherpaSubmitService.getISSNs(context, item); + for (String issn : ISSNs) { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(issn); + } + } + + /** + * Invalidate immediately the Sherpa cache + */ + public void evictAllCacheValues() { + Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate(); + } + + /** + * Set the reference to the cacheManager + * + * @param cacheManager + */ + public void setCacheManager(CacheManager cacheManager) { + this.cacheManager = cacheManager; + } + + /** + * Set the reference to the SherpaSubmitService + * + * @param sherpaSubmitService + */ + public void setSherpaSubmitService(SHERPASubmitService sherpaSubmitService) { + this.sherpaSubmitService = sherpaSubmitService; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java new file mode 100644 index 0000000000..e84fb7775a --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/cache/SherpaCacheLogger.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.cache; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.ehcache.event.CacheEvent; +import org.ehcache.event.CacheEventListener; + +/** + * 
This is a EHCache listner responsible for logging sherpa cache events. It is + * bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a + * dedicated Logger for each cache as the CacheEvent doesn't include details + * about where the event occur + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + * + */ +public class SherpaCacheLogger implements CacheEventListener { + + private static final Logger log = LogManager.getLogger(SherpaCacheLogger.class); + + @Override + public void onEvent(CacheEvent cacheEvent) { + log.debug("Sherpa Cache Event Type: {} | Key: {} ", + cacheEvent.getType(), cacheEvent.getKey()); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java index f34e2b6d57..b795c8a2b2 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/submit/SHERPASubmitService.java @@ -9,7 +9,6 @@ package org.dspace.app.sherpa.submit; import java.util.Iterator; import java.util.LinkedHashSet; -import java.util.LinkedList; import java.util.List; import java.util.Set; @@ -63,19 +62,19 @@ public class SHERPASubmitService { * issnItemExtractor(s) in the SHERPA spring configuration. * The ISSNs are not validated with a regular expression or other rules - any values * extracted will be included in API queries. 
+ * Return the first not empty response from Sherpa * @see "dspace-dspace-addon-sherpa-configuration-services.xml" * @param context DSpace context * @param item DSpace item containing ISSNs to be checked * @return SHERPA v2 API response (policy data) */ - public List searchRelatedJournals(Context context, Item item) { + public SHERPAResponse searchRelatedJournals(Context context, Item item) { Set issns = getISSNs(context, item); if (issns == null || issns.size() == 0) { return null; } else { // SHERPA v2 API no longer supports "OR'd" ISSN search, perform individual searches instead Iterator issnIterator = issns.iterator(); - List responses = new LinkedList<>(); while (issnIterator.hasNext()) { String issn = issnIterator.next(); SHERPAResponse response = sherpaService.searchByJournalISSN(issn); @@ -83,14 +82,13 @@ public class SHERPASubmitService { // Continue with loop log.warn("Failed to look up SHERPA ROMeO result for ISSN: " + issn + ": " + response.getMessage()); + return response; + } else if (!response.getJournals().isEmpty()) { + // return this response, if it is not empty + return response; } - // Store this response, even if it has an error (useful for UI reporting) - responses.add(response); } - if (responses.isEmpty()) { - responses.add(new SHERPAResponse("SHERPA ROMeO lookup failed")); - } - return responses; + return new SHERPAResponse(); } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java new file mode 100644 index 0000000000..c6a0bb7942 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAEmbargo.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.sherpa.v2; + +import java.io.Serializable; + +/** + * Model 
class for the Embargo of SHERPAv2 API (JSON) + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SHERPAEmbargo implements Serializable { + + private static final long serialVersionUID = 6140668058547523656L; + + private int amount; + private String units; + + public SHERPAEmbargo(int amount, String units) { + this.amount = amount; + this.units = units; + } + + public int getAmount() { + return amount; + } + + public void setAmount(int amount) { + this.amount = amount; + } + + public String getUnits() { + return units; + } + + public void setUnits(String units) { + this.units = units; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java index b668dbd927..8728eb1a79 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAJournal.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; /** @@ -21,7 +22,7 @@ import java.util.List; * * @author Kim Shepherd */ -public class SHERPAJournal { +public class SHERPAJournal implements Serializable { private List titles; private String url; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java index 3a810c8e9e..85d5f8960a 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPermittedVersion.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; /** @@ -28,7 +29,9 @@ import java.util.List; * * @see SHERPAPublisherPolicy */ -public class SHERPAPermittedVersion { +public class SHERPAPermittedVersion implements Serializable { + + private static final long 
serialVersionUID = 4992181606327727442L; // Version (submitted, accepted, published) private String articleVersion; @@ -47,11 +50,6 @@ public class SHERPAPermittedVersion { // Embargo private SHERPAEmbargo embargo; - protected static class SHERPAEmbargo { - String units; - int amount; - } - public String getArticleVersion() { return articleVersion; } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java index 0097ec2fb3..ee1491ed8b 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisher.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA Publisher object, based on SHERPA API v2 responses. * @@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2; * @see SHERPAJournal * @see SHERPAPublisherResponse */ -public class SHERPAPublisher { +public class SHERPAPublisher implements Serializable { private String name = null; private String relationshipType; private String country; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java index 2a04564e28..3e76c5cd37 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAPublisherPolicy.java @@ -7,6 +7,7 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; import java.util.List; import java.util.Map; @@ -22,7 +23,7 @@ import java.util.Map; * @see SHERPAJournal * @see SHERPAPermittedVersion */ -public class SHERPAPublisherPolicy { +public class SHERPAPublisherPolicy implements Serializable { private int id; private boolean openAccessPermitted; diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java 
b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java index a40814bafe..83dd1e0d3c 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPAResponse.java @@ -10,12 +10,15 @@ package org.dspace.app.sherpa.v2; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.Serializable; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Date; import java.util.List; import java.util.Map; import java.util.TreeMap; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.json.JSONArray; @@ -33,7 +36,10 @@ import org.json.JSONTokener; * @author Kim Shepherd * */ -public class SHERPAResponse { +public class SHERPAResponse implements Serializable { + + private static final long serialVersionUID = 2732963970169240597L; + // Is this response to be treated as an error? 
private boolean error; @@ -52,6 +58,9 @@ public class SHERPAResponse { // SHERPA URI (the human page version of this API response) private String uri; + @JsonIgnore + private Date retrievalTime = new Date(); + // Format enum - currently only JSON is supported public enum SHERPAFormat { JSON, XML @@ -71,6 +80,11 @@ public class SHERPAResponse { } } + /** + * Create an empty SHERPAResponse representation + */ + public SHERPAResponse() {} + /** * Parse the SHERPA v2 API JSON and construct Romeo policy data for display * This method does not return a value, but rather populates the metadata and journals objects @@ -479,6 +493,12 @@ public class SHERPAResponse { } permittedVersion.setLicenses(sherpaLicenses); + if (permitted.has("embargo")) { + JSONObject embargo = permitted.getJSONObject("embargo"); + SHERPAEmbargo SHERPAEmbargo = new SHERPAEmbargo(embargo.getInt("amount"), embargo.getString("units")); + permittedVersion.setEmbargo(SHERPAEmbargo); + } + return permittedVersion; } @@ -542,4 +562,8 @@ public class SHERPAResponse { public SHERPASystemMetadata getMetadata() { return metadata; } + + public Date getRetrievalTime() { + return retrievalTime; + } } diff --git a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java index 2a807940bb..65b07c1811 100644 --- a/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java +++ b/dspace-api/src/main/java/org/dspace/app/sherpa/v2/SHERPASystemMetadata.java @@ -7,6 +7,8 @@ */ package org.dspace.app.sherpa.v2; +import java.io.Serializable; + /** * Plain java representation of a SHERPA System Metadata object, based on SHERPA API v2 responses. 
* @@ -18,7 +20,7 @@ package org.dspace.app.sherpa.v2; * * @author Kim Shepherd */ -public class SHERPASystemMetadata { +public class SHERPASystemMetadata implements Serializable { private int id; private String uri; diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java index 32fd5d634d..f9fc97ec09 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInput.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInput.java @@ -561,6 +561,15 @@ public class DCInput { return true; } + /** + * Get the type bind list for use in determining whether + * to display this field in angular dynamic form building + * @return list of bound types + */ + public List getTypeBindList() { + return typeBind; + } + /** * Verify whether the current field contains an entity relationship * This also implies a relationship type is defined for this field diff --git a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java index bfd4270cf2..2359bf1bff 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java +++ b/dspace-api/src/main/java/org/dspace/app/util/DCInputSet.java @@ -7,6 +7,7 @@ */ package org.dspace.app.util; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -176,4 +177,50 @@ public class DCInputSet { return true; } + /** + * Iterate DC input rows and populate a list of all allowed field names in this submission configuration. + * This is important because an input can be configured repeatedly in a form (for example it could be required + * for type Book, and allowed but not required for type Article). + * If the field is allowed for this document type it'll never be stripped from metadata on validation. + * + * This can be more efficient than isFieldPresent to avoid looping the input set with each check. + * + * @param documentTypeValue Document type eg. 
Article, Book + * @return ArrayList of field names to use in validation + */ + public List populateAllowedFieldNames(String documentTypeValue) { + List allowedFieldNames = new ArrayList<>(); + // Before iterating each input for validation, run through all inputs + fields and populate a lookup + // map with inputs for this type. Because an input can be configured repeatedly in a form (for example + // it could be required for type Book, and allowed but not required for type Article), allowed=true will + // always take precedence + for (DCInput[] row : inputs) { + for (DCInput input : row) { + if (input.isQualdropValue()) { + List inputPairs = input.getPairs(); + //starting from the second element of the list and skipping one every time because the display + // values are also in the list and before the stored values. + for (int i = 1; i < inputPairs.size(); i += 2) { + String fullFieldname = input.getFieldName() + "." + inputPairs.get(i); + if (input.isAllowedFor(documentTypeValue)) { + if (!allowedFieldNames.contains(fullFieldname)) { + allowedFieldNames.add(fullFieldname); + } + // For the purposes of qualdrop, we have to add the field name without the qualifier + // too, or a required qualdrop will get confused and incorrectly reject a value + if (!allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } else { + if (input.isAllowedFor(documentTypeValue) && !allowedFieldNames.contains(input.getFieldName())) { + allowedFieldNames.add(input.getFieldName()); + } + } + } + } + return allowedFieldNames; + } + } diff --git a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java index 8bbec234c9..7871518b93 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SyndicationFeed.java @@ -193,13 +193,11 @@ public class SyndicationFeed { String defaultTitle = null; 
boolean podcastFeed = false; this.request = request; - // dso is null for the whole site, or a search without scope if (dso == null) { defaultTitle = configurationService.getProperty("dspace.name"); feed.setDescription(localize(labels, MSG_FEED_DESCRIPTION)); objectURL = resolveURL(request, null); - logoURL = configurationService.getProperty("webui.feed.logo.url"); } else { Bitstream logo = null; if (dso instanceof IndexableCollection) { @@ -329,7 +327,8 @@ public class SyndicationFeed { dcDescriptionField != null) { DCModule dc = new DCModuleImpl(); if (dcCreatorField != null) { - List dcAuthors = itemService.getMetadataByMetadataString(item, dcCreatorField); + List dcAuthors = itemService + .getMetadataByMetadataString(item, dcCreatorField); if (dcAuthors.size() > 0) { List creators = new ArrayList<>(); for (MetadataValue author : dcAuthors) { @@ -345,7 +344,8 @@ public class SyndicationFeed { } } if (dcDescriptionField != null) { - List v = itemService.getMetadataByMetadataString(item, dcDescriptionField); + List v = itemService + .getMetadataByMetadataString(item, dcDescriptionField); if (v.size() > 0) { StringBuilder descs = new StringBuilder(); for (MetadataValue d : v) { @@ -376,6 +376,7 @@ public class SyndicationFeed { enc.setLength(bit.getSizeBytes()); enc.setUrl(urlOfBitstream(request, bit)); enclosures.add(enc); + } } } diff --git a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java index dab8cd5b2e..ca5b4a11b5 100644 --- a/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authority/AuthoritySolrServiceImpl.java @@ -50,7 +50,7 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho */ protected SolrClient solr = null; - protected SolrClient getSolr() + public SolrClient getSolr() throws MalformedURLException, SolrServerException, IOException { if (solr == 
null) { @@ -67,7 +67,11 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho SolrQuery solrQuery = new SolrQuery().setQuery("*:*"); - solrServer.query(solrQuery); + try { + solrServer.query(solrQuery); + } catch (Exception ex) { + log.error("An error occurs querying authority solr core", ex); + } solr = solrServer; } diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 919e82f14f..a9874afda6 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -7,6 +7,9 @@ */ package org.dspace.authorize; +import static org.dspace.app.util.AuthorizeUtil.canCollectionAdminManageAccounts; +import static org.dspace.app.util.AuthorizeUtil.canCommunityAdminManageAccounts; + import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; @@ -900,6 +903,16 @@ public class AuthorizeServiceImpl implements AuthorizeService { return discoverResult.getTotalSearchResults(); } + @Override + public boolean isAccountManager(Context context) { + try { + return (canCommunityAdminManageAccounts() && isCommunityAdmin(context) + || canCollectionAdminManageAccounts() && isCollectionAdmin(context)); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + private boolean performCheck(Context context, String query) throws SQLException { if (context.getCurrentUser() == null) { return false; diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 9f6171a220..6b097cdd73 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -592,4 +592,12 @@ public interface AuthorizeService { */ long 
countAdminAuthorizedCollection(Context context, String query) throws SearchServiceException, SQLException; + + /** + * Returns true if the current user can manage accounts. + * + * @param context context with the current user + * @return true if the current user can manage accounts + */ + boolean isAccountManager(Context context); } diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index c7d43eb523..9529bc3a8b 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -1139,6 +1139,50 @@ prevent the generation of resource policy entry values with null dspace_object a return !(hasCustomPolicy && isAnonimousGroup && datesAreNull); } + /** + * Returns an iterator of Items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param schema metadata field schema + * @param element metadata field element + * @param qualifier metadata field qualifier + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that authority value + * @throws SQLException if database error + * An exception that provides information on a database access error or other errors. + * @throws AuthorizeException if authorization error + * Exception indicating the current user of the context does not have permission + * to perform a particular action. 
+ */ + @Override + public Iterator findArchivedByMetadataField(Context context, + String schema, String element, String qualifier, String value) + throws SQLException, AuthorizeException { + MetadataSchema mds = metadataSchemaService.find(context, schema); + if (mds == null) { + throw new IllegalArgumentException("No such metadata schema: " + schema); + } + MetadataField mdf = metadataFieldService.findByElement(context, mds, element, qualifier); + if (mdf == null) { + throw new IllegalArgumentException( + "No such metadata field: schema=" + schema + ", element=" + element + ", qualifier=" + qualifier); + } + + if (Item.ANY.equals(value)) { + return itemDAO.findByMetadataField(context, mdf, null, true); + } else { + return itemDAO.findByMetadataField(context, mdf, value, true); + } + } + + @Override + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException { + String[] mdValueByField = getMDValueByField(metadataField); + return findArchivedByMetadataField(context, mdValueByField[0], mdValueByField[1], mdValueByField[2], value); + } + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if value is not Item.ANY diff --git a/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java new file mode 100644 index 0000000000..8d929a8d3b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/EPersonAuthority.java @@ -0,0 +1,127 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import 
java.util.UUID; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.EPersonService; +import org.dspace.util.UUIDUtils; +import org.dspace.web.ContextUtil; + +/** + * Implementation of {@link ChoiceAuthority} based on EPerson. Allows you to set + * the id of an eperson as authority. + * + * @author Mykhaylo Boychuk (4science.it) + */ +public class EPersonAuthority implements ChoiceAuthority { + + private static final Logger log = LogManager.getLogger(EPersonAuthority.class); + + /** + * the name assigned to the specific instance by the PluginService, @see + * {@link NameAwarePlugin} + **/ + private String authorityName; + + private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); + + private AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService(); + + @Override + public Choices getBestMatch(String text, String locale) { + return getMatches(text, 0, 2, locale); + } + + @Override + public Choices getMatches(String text, int start, int limit, String locale) { + if (limit <= 0) { + limit = 20; + } + + Context context = getContext(); + + List ePersons = searchEPersons(context, text, start, limit); + + List choiceList = new ArrayList(); + for (EPerson eperson : ePersons) { + choiceList.add(new Choice(eperson.getID().toString(), eperson.getFullName(), eperson.getFullName())); + } + Choice[] results = new Choice[choiceList.size()]; + results = choiceList.toArray(results); + return new Choices(results, start, ePersons.size(), Choices.CF_AMBIGUOUS, ePersons.size() > (start + limit), 0); + } + + @Override + public String getLabel(String key, String locale) { + + UUID uuid = 
UUIDUtils.fromString(key); + if (uuid == null) { + return null; + } + + Context context = getContext(); + try { + EPerson ePerson = ePersonService.find(context, uuid); + return ePerson != null ? ePerson.getFullName() : null; + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private List searchEPersons(Context context, String text, int start, int limit) { + + if (!isCurrentUserAdminOrAccessGroupManager(context)) { + return Collections.emptyList(); + } + + try { + return ePersonService.search(context, text, start, limit); + } catch (SQLException e) { + log.error(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); + } + + } + + private Context getContext() { + Context context = ContextUtil.obtainCurrentRequestContext(); + return context != null ? context : new Context(); + } + + private boolean isCurrentUserAdminOrAccessGroupManager(Context context) { + try { + return authorizeService.isAdmin(context) || authorizeService.isAccountManager(context); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Override + public String getPluginInstanceName() { + return authorityName; + } + + @Override + public void setPluginInstanceName(String name) { + this.authorityName = name; + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java index 1675c9e833..8b7badf223 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/ItemService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/ItemService.java @@ -594,6 +594,37 @@ public interface ItemService */ public boolean canCreateNewVersion(Context context, Item item) throws SQLException; + /** + * Returns an iterator of in archive items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace 
context object + * @param schema metadata field schema + * @param element metadata field element + * @param qualifier metadata field qualifier + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that metadata field value + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Iterator findArchivedByMetadataField(Context context, String schema, + String element, String qualifier, + String value) throws SQLException, AuthorizeException; + + /** + * Returns an iterator of in archive items possessing the passed metadata field, or only + * those matching the passed value, if value is not Item.ANY + * + * @param context DSpace context object + * @param metadataField metadata field in "schema.element.qualifier" form + * @param value field value or Item.ANY to match any value + * @return an iterator over the items matching that metadata field value + * @throws SQLException if database error + * @throws AuthorizeException if authorization error + */ + public Iterator findArchivedByMetadataField(Context context, String metadataField, String value) + throws SQLException, AuthorizeException; + /** * Returns an iterator of Items possessing the passed metadata field, or only * those matching the passed value, if value is not Item.ANY @@ -633,7 +664,7 @@ public interface ItemService */ public Iterator findByAuthorityValue(Context context, String schema, String element, String qualifier, String value) - throws SQLException, AuthorizeException, IOException; + throws SQLException, AuthorizeException; public Iterator findByMetadataFieldAuthority(Context context, String mdString, String authority) diff --git a/dspace-api/src/main/java/org/dspace/core/Context.java b/dspace-api/src/main/java/org/dspace/core/Context.java index f5edcf2eb0..1b1ea9ccf8 100644 --- a/dspace-api/src/main/java/org/dspace/core/Context.java +++ b/dspace-api/src/main/java/org/dspace/core/Context.java @@ -10,6 +10,7 @@ package org.dspace.core; import 
java.sql.SQLException; import java.util.ArrayList; import java.util.Deque; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -91,12 +92,12 @@ public class Context implements AutoCloseable { /** * Group IDs of special groups user is a member of */ - private List specialGroups; + private Set specialGroups; /** * Temporary store for the specialGroups when the current user is temporary switched */ - private List specialGroupsPreviousState; + private Set specialGroupsPreviousState; /** * The currently used authentication method @@ -183,7 +184,7 @@ public class Context implements AutoCloseable { extraLogInfo = ""; ignoreAuth = false; - specialGroups = new ArrayList<>(); + specialGroups = new HashSet<>(); authStateChangeHistory = new ConcurrentLinkedDeque<>(); authStateClassCallHistory = new ConcurrentLinkedDeque<>(); @@ -656,6 +657,15 @@ public class Context implements AutoCloseable { return myGroups; } + /** + * Get a set of all of the special groups uuids that current user is a member of. + * + * @return list of special groups uuids + */ + public Set getSpecialGroupUuids() { + return CollectionUtils.isEmpty(specialGroups) ? 
Set.of() : specialGroups; + } + /** * Temporary change the user bound to the context, empty the special groups that * are retained to allow subsequent restore @@ -673,7 +683,7 @@ public class Context implements AutoCloseable { currentUserPreviousState = currentUser; specialGroupsPreviousState = specialGroups; - specialGroups = new ArrayList<>(); + specialGroups = new HashSet<>(); currentUser = newUser; } diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java index f894553e5d..cdb15f90f7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceImpl.java @@ -1174,7 +1174,7 @@ public class SolrServiceImpl implements SearchService, IndexingService { //DO NOT ESCAPE RANGE QUERIES ! if (!value.matches("\\[.*TO.*\\]")) { value = ClientUtils.escapeQueryChars(value); - filterQuery.append("(").append(value).append(")"); + filterQuery.append("\"").append(value).append("\""); } else { filterQuery.append(value); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java index 3c93e1c522..ae78c0feec 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/IndexFactoryImpl.java @@ -113,9 +113,11 @@ public abstract class IndexFactoryImpl implements log.info("Full text is larger than the configured limit (discovery.solr.fulltext.charLimit)." + " Only the first {} characters were indexed.", charLimit); } else { + log.error("Tika parsing error. Could not index full text.", saxe); throw new IOException("Tika parsing error. Could not index full text.", saxe); } } catch (TikaException ex) { + log.error("Tika parsing error. 
Could not index full text.", ex); throw new IOException("Tika parsing error. Could not index full text.", ex); } diff --git a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java index 004334e92d..1f4c4db53e 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/EPersonServiceImpl.java @@ -569,4 +569,9 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl impleme public int countTotal(Context context) throws SQLException { return ePersonDAO.countRows(context); } + + @Override + public String getName(EPerson dso) { + return dso.getName(); + } } diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index be81cd9bd8..c83dabcc4d 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -829,4 +829,9 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements final MetadataField metadataField) throws SQLException { return groupDAO.findByMetadataField(context, searchValue, metadataField); } + + @Override + public String getName(Group dso) { + return dso.getName(); + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java similarity index 95% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java rename to dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java index bd77c578e6..28d57975bf 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CacheLogger.java +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CacheLogger.java @@ -6,7 +6,7 @@ * http://www.dspace.org/license/ */ -package org.dspace.app.rest.cache; +package 
org.dspace.iiif.logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java similarity index 95% rename from dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java rename to dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java index eaa08000ee..2f1a8d6dba 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/cache/CanvasCacheLogger.java +++ b/dspace-api/src/main/java/org/dspace/iiif/logger/CanvasCacheLogger.java @@ -5,7 +5,7 @@ * * http://www.dspace.org/license/ */ -package org.dspace.app.rest.cache; +package org.dspace.iiif.logger; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java new file mode 100644 index 0000000000..e7d2d3398b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the ADS metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ADSFieldMapping extends AbstractMetadataFieldMapping { + + /** + * 
Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "adsMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..8fbe4ef2cf --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/ads/ADSImportMetadataSourceServiceImpl.java @@ -0,0 +1,334 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.ads; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import 
org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying ADS + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class ADSImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String resultFieldList; + + private String apiKey; + private int timeout = 1000; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "ads"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + return retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + @Override + public void init() throws Exception {} + + public String getApiKey() { + return apiKey; + } + + public void setApiKey(String apiKey) { + this.apiKey = apiKey; + } + + /** + * This class is a Callable implementation to get ADS entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + return search(query.getParameterAsClass("query", String.class), + query.getParameterAsClass("start", Integer.class), + query.getParameterAsClass("count", Integer.class), + getApiKey()); + } + } + + /** + * This class is a Callable implementation to get an ADS entry using bibcode + * The bibcode to use can be passed through the constructor as a String or as Query's map entry, with the key "id". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + String queryString = "bibcode:" + query.getParameterAsClass("id", String.class); + return search(queryString, 0 , 1, getApiKey()); + } + } + + /** + * This class is a Callable implementation to search ADS entries + * using author and title and year. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer year = query.getParameterAsClass("year", Integer.class); + return search(title, author, year, start, count, getApiKey()); + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an ADS query. + * This Callable uses as the ADS query value the string queryString passed to the constructor. + * If the object is constructed through a Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + private Query query; + + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + return count(query.getParameterAsClass("query", String.class), getApiKey()); + } + } + + private List search(String title, String author, int year, int start, int count, String token) { + String query = ""; + if (StringUtils.isNotBlank(title)) { + query += "title:" + title; + } + if (StringUtils.isNotBlank(author)) { + String splitRegex = "(\\s*,\\s+|\\s*;\\s+|\\s*;+|\\s*,+|\\s+)"; + String[] authors = author.split(splitRegex); + // [FAU] + if (StringUtils.isNotBlank(query)) { + query = "author:"; + } else { + query += "&fq=author:"; + } + int x = 0; + for (String auth : authors) { + x++; + query += auth; + if (x < authors.length) { + query += " AND "; + } + } + } + if (year != -1) { + // [DP] + if (StringUtils.isNotBlank(query)) { + query = "year:"; + } else { + query += "&fq=year:"; + } + query += year; + } + return search(query.toString(), start, count, token); + } + + public Integer count(String query, String token) { + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", "1"); + uriBuilder.addParameter("start", "0"); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + return jsonNode.at("/response/numFound").asInt(); + } catch 
(URISyntaxException e) { + e.printStackTrace(); + } + return 0; + } + + public List search(String query, Integer start, Integer count, String token) { + List adsResults = new ArrayList<>(); + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + token); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.url); + uriBuilder.addParameter("q", query); + uriBuilder.addParameter("rows", count.toString()); + uriBuilder.addParameter("start", start.toString()); + uriBuilder.addParameter("fl", this.resultFieldList); + + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + JsonNode docs = jsonNode.at("/response/docs"); + if (docs.isArray()) { + Iterator nodes = docs.elements(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + adsResults.add(transformSourceRecords(node.toString())); + } + } else { + adsResults.add(transformSourceRecords(docs.toString())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); + } + return adsResults; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + + public void setResultFieldList(String resultFieldList) { + this.resultFieldList = resultFieldList; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java new file mode 100644 index 0000000000..abf84f52d0 --- /dev/null +++ 
b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefAuthorMetadataProcessor.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; + +/** + * This class is used for CrossRef's Live-Import to extract + * attributes such as "given" and "family" from the array of authors/editors + * and return them concatenated. + * Beans are configured in the crossref-integration.xml file. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class CrossRefAuthorMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator authors = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (authors.hasNext()) { + JsonNode author = authors.next(); + String givenName = author.at("/given").textValue(); + String familyName = author.at("/family").textValue(); + if (StringUtils.isNoneBlank(givenName) && StringUtils.isNoneBlank(familyName)) { + values.add(givenName + " " + familyName); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java new file mode 100644 index 0000000000..5e879b4d26 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; 
+ +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the CrossRef metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class CrossRefFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "crossrefMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..7dde330b27 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefImportMetadataSourceServiceImpl.java @@ -0,0 +1,336 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import 
com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.DoiCheck; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying CrossRef + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CrossRefImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public String getImportSource() { + return "crossref"; + } + + @Override + public void init() throws Exception {} + + @Override + public ImportRecord getRecord(String recordId) throws MetadataSourceException { + String id = getID(recordId); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(recordId)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + String id = getID(query); + return StringUtils.isNotBlank(id) ? 
retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new DoiCheckCallable(id)) : retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + if (StringUtils.isNotBlank(id)) { + return retry(new SearchByIdCallable(id)); + } + return retry(new SearchByQueryCallable(query)); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + List records = StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new SearchByIdCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String id = getID(query.toString()); + return StringUtils.isNotBlank(id) ? retry(new SearchByIdCallable(id)) + : retry(new FindMatchingRecordCallable(query)); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for CrossRef"); + } + + public String getID(String id) { + return DoiCheck.isDoi(id) ? "filter=doi:" + id : StringUtils.EMPTY; + } + + /** + * This class is a Callable implementation to get CrossRef entries based on query object. + * This Callable use as query value the string queryString passed to constructor. 
+ * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(response); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + results.add(transformSourceRecords(node.toString())); + } + return results; + } + + } + + /** + * This class is a Callable implementation to get an CrossRef entry using DOI + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + private Query query; + + private SearchByIdCallable(Query query) { + this.query = query; + } + + private SearchByIdCallable(String id) { + this.query = new Query(); + query.addParameter("id", id); + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String ID = URLDecoder.decode(query.getParameterAsClass("id", String.class), "UTF-8"); + URIBuilder uriBuilder = new URIBuilder(url + "/" + ID); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + JsonNode messageNode = jsonNode.at("/message"); + results.add(transformSourceRecords(messageNode.toString())); + return results; + } + } + + /** + * This class is a Callable implementation to search CrossRef entries using author and title. + * There are two field in the Query map to pass, with keys "title" and "author" + * (at least one must be used). 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindMatchingRecordCallable implements Callable> { + + private Query query; + + private FindMatchingRecordCallable(Query q) { + query = q; + } + + @Override + public List call() throws Exception { + String queryValue = query.getParameterAsClass("query", String.class); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + String bibliographics = query.getParameterAsClass("bibliographics", String.class); + List results = new ArrayList<>(); + URIBuilder uriBuilder = new URIBuilder(url); + if (Objects.nonNull(queryValue)) { + uriBuilder.addParameter("query", queryValue); + } + if (Objects.nonNull(count)) { + uriBuilder.addParameter("rows", count.toString()); + } + if (Objects.nonNull(start)) { + uriBuilder.addParameter("offset", start.toString()); + } + if (Objects.nonNull(author)) { + uriBuilder.addParameter("query.author", author); + } + if (Objects.nonNull(title )) { + uriBuilder.addParameter("query.container-title", title); + } + if (Objects.nonNull(bibliographics)) { + uriBuilder.addParameter("query.bibliographic", bibliographics); + } + Map> params = new HashMap>(); + String resp = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(resp); + Iterator nodes = jsonNode.at("/message/items").iterator(); + while (nodes.hasNext()) { + JsonNode node = nodes.next(); + results.add(transformSourceRecords(node.toString())); + } + return results; + } + + } + + /** + * This class is a Callable implementation to count the number of entries for an CrossRef query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. 
+ * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + private CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + private CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("query", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return jsonNode.at("/message/total-results").asInt(); + } + } + + /** + * This class is a Callable implementation to check if exist an CrossRef entry using DOI. + * The DOI to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * return 1 if CrossRef entry exists otherwise 0 + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class DoiCheckCallable implements Callable { + + private final Query query; + + private DoiCheckCallable(final String id) { + final Query query = new Query(); + query.addParameter("id", id); + this.query = query; + } + + private DoiCheckCallable(final Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + "/" + query.getParameterAsClass("id", String.class)); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode jsonNode = convertStringJsonToJsonNode(responseString); + return StringUtils.equals(jsonNode.at("/status").toString(), "ok") ? 1 : 0; + } + } + + private JsonNode convertStringJsonToJsonNode(String json) { + try { + return new ObjectMapper().readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return null; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java new file mode 100644 index 0000000000..64ec53ffb9 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoFieldMapping.java @@ -0,0 +1,36 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + + +/** + * An 
implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Epo metadatum fields on the DSpace metadatum fields + * + * @author Pasquale Cavallo (pasquale.cavallo at 4science dot it) + */ +public class EpoFieldMapping extends AbstractMetadataFieldMapping { + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @Resource(name = "epoMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} diff --git a/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..7240e356e3 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/epo/service/EpoImportMetadataSourceServiceImpl.java @@ -0,0 +1,541 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.epo.service; + +import static org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl.HEADER_PARAMETERS; + +import java.io.IOException; +import java.io.StringReader; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import 
java.util.stream.Collectors; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpException; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.xerces.impl.dv.util.Base64; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.contributor.EpoIdMetadataContributor.EpoDocumentId; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Document; +import org.jdom2.Element; +import org.jdom2.JDOMException; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.input.SAXBuilder; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying EPO + * + * @author Pasquale Cavallo (pasquale.cavallo at 4Science dot it) + */ +public class EpoImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String authUrl; + private String searchUrl; + + private String consumerKey; + private String 
consumerSecret; + + private MetadataFieldConfig dateFiled; + private MetadataFieldConfig applicationNumber; + + public static final String APP_NO_DATE_SEPARATOR = "$$$"; + private static final String APP_NO_DATE_SEPARATOR_REGEX = "\\$\\$\\$"; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + /** + * The string that identifies this import implementation. Preferable a URI + * + * @return the identifying uri + */ + @Override + public String getImportSource() { + return "epo"; + } + + /** + * Set the customer epo key + * @param consumerKey the customer consumer key + */ + public void setConsumerKey(String consumerKey) { + this.consumerKey = consumerKey; + } + + public String getConsumerKey() { + return consumerKey; + } + + /** + * Set the costumer epo secret + * @param consumerSecret the customer epo secret + */ + public void setConsumerSecret(String consumerSecret) { + this.consumerSecret = consumerSecret; + } + + public String getConsumerSecret() { + return consumerSecret; + } + + public void setDateFiled(MetadataFieldConfig dateFiled) { + this.dateFiled = dateFiled; + } + + public MetadataFieldConfig getDateFiled() { + return dateFiled; + } + + public void setApplicationNumber(MetadataFieldConfig applicationNumber) { + this.applicationNumber = applicationNumber; + } + + public MetadataFieldConfig getApplicationNumber() { + return applicationNumber; + } + + /*** + * Log to EPO, bearer is valid for 20 minutes + * + * @param consumerKey The consumer Key + * @param consumerSecretKey The consumer secret key + * @return + * @throws IOException + * @throws HttpException + */ + protected String login() throws IOException, HttpException { + Map> params = getLoginParams(); + String entity = "grant_type=client_credentials"; + String json = liveImportClient.executeHttpPostRequest(this.authUrl, params, entity); + ObjectMapper mapper = new ObjectMapper(new JsonFactory()); + JsonNode rootNode = 
mapper.readTree(json); + JsonNode accessTokenNode = rootNode.get("access_token"); + return accessTokenNode.asText(); + } + + private Map> getLoginParams() { + Map> params = new HashMap>(); + Map headerParams = getLoginHeaderParams(); + params.put(HEADER_PARAMETERS, headerParams); + return params; + } + + private Map getLoginHeaderParams() { + Map params = new HashMap(); + String authString = consumerKey + ":" + consumerSecret; + params.put("Authorization", "Basic " + Base64.encode(authString.getBytes())); + params.put("Content-type", "application/x-www-form-urlencoded"); + return params; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return 0; + + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new CountRecordsCallable(query, bearer)); + } catch (IOException | HttpException e) { + e.printStackTrace(); + } + } + return 0; + } + + @Override + public Collection getRecords(String query, int start, + int count) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer, start, count)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new ArrayList(); + } + + @Override + public Collection getRecords(Query query) + throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && 
StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + return retry(new SearchByQueryCallable(query, bearer)); + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return new ArrayList(); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + try { + String bearer = login(); + List list = retry(new SearchByIdCallable(id, bearer)); + return CollectionUtils.isNotEmpty(list) ? list.get(0) : null; + } catch (IOException | HttpException e) { + log.warn(e.getMessage()); + throw new RuntimeException(e.getMessage(), e); + } + } + return null; + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Item item) + throws MetadataSourceException { + return null; + } + + @Override + public Collection findMatchingRecords(Query query) + throws MetadataSourceException { + return null; + } + + /** + * This class is a Callable implementation to count the number of entries for an EPO query. + * This Callable use as query value to EPO the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountRecordsCallable implements Callable { + + private String bearer; + private String query; + + private CountRecordsCallable(Query query, String bearer) { + this.query = query.getParameterAsClass("query", String.class); + this.bearer = bearer; + } + + private CountRecordsCallable(String query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public Integer call() throws Exception { + return countDocument(bearer, query); + } + } + + /** + * This class is a Callable implementation to get an EPO entry using epodocID (epodoc:AB1234567T) + * The epodocID to use can be passed through the constructor as a String or as Query's map entry, with the key "id". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByIdCallable implements Callable> { + + private String id; + private String bearer; + + private SearchByIdCallable(String id, String bearer) { + this.id = id; + this.bearer = bearer; + } + + public List call() throws Exception { + int positionToSplit = id.indexOf(":"); + String docType = EpoDocumentId.EPODOC; + String idS = id; + if (positionToSplit != -1) { + docType = id.substring(0, positionToSplit); + idS = id.substring(positionToSplit + 1, id.length()); + } else if (id.contains(APP_NO_DATE_SEPARATOR)) { + // special case the id is the combination of the applicationnumber and date filed + String query = "applicationnumber=" + id.split(APP_NO_DATE_SEPARATOR_REGEX)[0]; + SearchByQueryCallable search = new SearchByQueryCallable(query, bearer, 0, 10); + List records = search.call().stream() + .filter(r -> r.getValue(dateFiled.getSchema(), dateFiled.getElement(), + dateFiled.getQualifier()) + .stream() + .anyMatch(m -> StringUtils.equals(m.getValue(), + id.split(APP_NO_DATE_SEPARATOR_REGEX)[1]) + )) + .limit(1).collect(Collectors.toList()); + return records; + } + List records = searchDocument(bearer, idS, docType); + if 
(records.size() > 1) { + log.warn("More record are returned with epocID " + id); + } + return records; + } + } + + /** + * This class is a Callable implementation to get EPO entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + private Integer start; + private Integer count; + private String bearer; + + private SearchByQueryCallable(Query query, String bearer) { + this.query = query; + this.bearer = bearer; + } + + public SearchByQueryCallable(String queryValue, String bearer, int start, int count) { + this.query = new Query(); + query.addParameter("query", queryValue); + this.start = query.getParameterAsClass("start", Integer.class) != null ? + query.getParameterAsClass("start", Integer.class) : 0; + this.count = query.getParameterAsClass("count", Integer.class) != null ? 
+ query.getParameterAsClass("count", Integer.class) : 20; + this.bearer = bearer; + } + + @Override + public List call() throws Exception { + List records = new ArrayList(); + String queryString = query.getParameterAsClass("query", String.class); + if (StringUtils.isNotBlank(consumerKey) && StringUtils.isNotBlank(consumerSecret)) { + if (StringUtils.isNotBlank(queryString) && StringUtils.isNotBlank(bearer)) { + List epoDocIds = searchDocumentIds(bearer, queryString, start + 1, count); + for (EpoDocumentId epoDocId : epoDocIds) { + List recordfounds = searchDocument(bearer, epoDocId); + if (recordfounds.size() > 1) { + log.warn("More record are returned with epocID " + epoDocId.toString()); + } + records.addAll(recordfounds); + } + } + + } + return records; + } + } + + private Integer countDocument(String bearer, String query) { + if (StringUtils.isBlank(bearer)) { + return null; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + headerParameters.put("X-OPS-Range", "1-1"); + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + + String totalRes = getElement(root, namespaces, "//ops:biblio-search/@total-result-count"); + return Integer.parseInt(totalRes); + } catch (JDOMException | IOException | URISyntaxException | JaxenException e) { + log.error(e.getMessage(), e); + return null; + } + } + + private List 
searchDocumentIds(String bearer, String query, int start, int count) { + List results = new ArrayList(); + int end = start + count; + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + if (start >= 1 && end > start) { + headerParameters.put("X-OPS-Range", start + "-" + end); + } + params.put(HEADER_PARAMETERS, headerParameters); + + URIBuilder uriBuilder = new URIBuilder(this.searchUrl); + uriBuilder.addParameter("q", query); + + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(response)); + Element root = document.getRootElement(); + + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance() + .compile("//ns:document-id", Filters.element(), null, namespaces); + + List documentIds = xpath.evaluate(root); + for (Element documentId : documentIds) { + results.add(new EpoDocumentId(documentId, namespaces)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List searchDocument(String bearer, EpoDocumentId id) { + return searchDocument(bearer, id.getId(), id.getDocumentIdType()); + } + + private List searchDocument(String bearer, String id, String docType) { + List results = new ArrayList(); + if (StringUtils.isBlank(bearer)) { + return results; + } + try { + Map> params = new HashMap>(); + Map headerParameters = new HashMap(); + headerParameters.put("Authorization", "Bearer " + bearer); + params.put(HEADER_PARAMETERS, headerParameters); + + String url = this.url.replace("$(doctype)", docType).replace("$(id)", id); + + 
String response = liveImportClient.executeHttpGetRequest(1000, url, params); + List elements = splitToRecords(response); + for (Element element : elements) { + results.add(transformSourceRecords(element)); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + return results; + } + + private List splitToRecords(String recordsSrc) { + try { + SAXBuilder saxBuilder = new SAXBuilder(); + Document document = saxBuilder.build(new StringReader(recordsSrc)); + Element root = document.getRootElement(); + List namespaces = Arrays.asList(Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile("//ns:exchange-document", + Filters.element(), null, namespaces); + + List recordsList = xpath.evaluate(root); + return recordsList; + } catch (JDOMException | IOException e) { + log.error(e.getMessage(), e); + return new LinkedList(); + } + } + + private String getElement(Element document, List namespaces, String path) throws JaxenException { + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(document); + //exactly one element expected for any field + if (CollectionUtils.isEmpty(nodes)) { + return StringUtils.EMPTY; + } else { + return getValue(nodes.get(0)); + } + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + log.error("node of type: " + el.getClass()); + return ""; + } + } + + public void setUrl(String url) { + this.url = url; + } + + public void setAuthUrl(String authUrl) { + this.authUrl = authUrl; + } + + public void setSearchUrl(String searchUrl) { + this.searchUrl = searchUrl; + } + +} diff --git 
a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java new file mode 100644 index 0000000000..829b5ed2de --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClient.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.util.Map; + +/** + * Interface for classes that allow to contact LiveImport clients. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public interface LiveImportClient { + + /** + * Http GET request + * + * @param timeout The connect timeout in milliseconds + * @param URL URL + * @param requestParams This map contains the parameters to be included in the request. + * Each parameter will be added to the url?(key=value) + * @return The response in String type converted from InputStream + */ + public String executeHttpGetRequest(int timeout, String URL, Map> params); + + /** + * Http POST request + * + * @param URL URL + * @param params This map contains the header params to be included in the request. 
+ * @param entry the entity value + * @return the response in String type converted from InputStream + */ + public String executeHttpPostRequest(String URL, Map> params, String entry); +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java new file mode 100644 index 0000000000..a332b78f3b --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/liveimportclient/service/LiveImportClientImpl.java @@ -0,0 +1,187 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.liveimportclient.service; + +import java.io.InputStream; +import java.net.URISyntaxException; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpHost; +import org.apache.http.HttpResponse; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.config.RequestConfig.Builder; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpRequestBase; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * 
Implementation of {@link LiveImportClient}. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science dot com) + */ +public class LiveImportClientImpl implements LiveImportClient { + + private final static Logger log = LogManager.getLogger(); + + public static final String URI_PARAMETERS = "uriParameters"; + public static final String HEADER_PARAMETERS = "headerParameters"; + + private CloseableHttpClient httpClient; + + @Autowired + private ConfigurationService configurationService; + + @Override + public String executeHttpGetRequest(int timeout, String URL, Map> params) { + HttpGet method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder requestConfigBuilder = RequestConfig.custom(); + requestConfigBuilder.setConnectionRequestTimeout(timeout); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + method = new HttpGet(buildUrl(URL, params.get(URI_PARAMETERS))); + method.setConfig(defaultRequestConfig); + + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + + configureProxy(method, defaultRequestConfig); + + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException(); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + @Override + public String executeHttpPostRequest(String URL, Map> params, String entry) { + HttpPost method = null; + try (CloseableHttpClient httpClient = Optional.ofNullable(this.httpClient) + .orElseGet(HttpClients::createDefault)) { + + Builder 
requestConfigBuilder = RequestConfig.custom(); + RequestConfig defaultRequestConfig = requestConfigBuilder.build(); + + method = new HttpPost(buildUrl(URL, params.get(URI_PARAMETERS))); + method.setConfig(defaultRequestConfig); + if (StringUtils.isNotBlank(entry)) { + method.setEntity(new StringEntity(entry)); + } + setHeaderParams(method, params); + + configureProxy(method, defaultRequestConfig); + + HttpResponse httpResponse = httpClient.execute(method); + if (isNotSuccessfull(httpResponse)) { + throw new RuntimeException(); + } + InputStream inputStream = httpResponse.getEntity().getContent(); + return IOUtils.toString(inputStream, Charset.defaultCharset()); + } catch (Exception e1) { + log.error(e1.getMessage(), e1); + } finally { + if (Objects.nonNull(method)) { + method.releaseConnection(); + } + } + return StringUtils.EMPTY; + } + + private void configureProxy(HttpRequestBase method, RequestConfig defaultRequestConfig) { + String proxyHost = configurationService.getProperty("http.proxy.host"); + String proxyPort = configurationService.getProperty("http.proxy.port"); + if (StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort)) { + RequestConfig requestConfig = RequestConfig.copy(defaultRequestConfig) + .setProxy(new HttpHost(proxyHost, Integer.parseInt(proxyPort), "http")) + .build(); + method.setConfig(requestConfig); + } + } + + /** + * Allows to set the header parameters to the HTTP Post method + * + * @param method HttpPost method + * @param params This map contains the header params to be included in the request. 
+ */ + private void setHeaderParams(HttpPost method, Map> params) { + Map headerParams = params.get(HEADER_PARAMETERS); + if (MapUtils.isNotEmpty(headerParams)) { + for (String param : headerParams.keySet()) { + method.setHeader(param, headerParams.get(param)); + } + } + } + + /** + * This method allows you to add the parameters contained in the requestParams map to the URL + * + * @param URL URL + * @param requestParams This map contains the parameters to be included in the request. + * Each parameter will be added to the url?(key=value) + * @return + * @throws URISyntaxException + */ + private String buildUrl(String URL, Map requestParams) throws URISyntaxException { + URIBuilder uriBuilder = new URIBuilder(URL); + if (MapUtils.isNotEmpty(requestParams)) { + for (String param : requestParams.keySet()) { + uriBuilder.setParameter(param, requestParams.get(param)); + } + } + return uriBuilder.toString(); + } + + private boolean isNotSuccessfull(HttpResponse response) { + int statusCode = getStatusCode(response); + return statusCode < 200 || statusCode > 299; + } + + private int getStatusCode(HttpResponse response) { + return response.getStatusLine().getStatusCode(); + } + + public CloseableHttpClient getHttpClient() { + return httpClient; + } + + public void setHttpClient(CloseableHttpClient httpClient) { + this.httpClient = httpClient; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java new file mode 100644 index 0000000000..b938a290c2 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/ArrayElementAttributeProcessor.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + 
* tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract attribute values of an array. + * For exaple to extract all values of secondAttribute, + * "array":[ + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * }, + * { + * "firstAttribute":"first value", + * "secondAttribute":"second value" + * } + * ] + * + * it's possible configure a bean with + * pathToArray=/array and elementAttribute=/secondAttribute + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class ArrayElementAttributeProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + private String elementAttribute; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + String value = element.at(elementAttribute).textValue(); + if (StringUtils.isNoneBlank(value)) { + values.add(value); + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String 
pathToArray) { + this.pathToArray = pathToArray; + } + + public void setElementAttribute(String elementAttribute) { + this.elementAttribute = elementAttribute; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java new file mode 100644 index 0000000000..00b414c485 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/EpoIdMetadataContributor.java @@ -0,0 +1,312 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import javax.annotation.Resource; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jaxen.JaxenException; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; +import org.springframework.beans.factory.annotation.Required; + +/** + * Custom MetadataContributor to manage Epo ID. + * Need as input element and all children. 
+ * + * @author Pasquale Cavallo + */ +public class EpoIdMetadataContributor implements MetadataContributor { + + protected MetadataFieldConfig field; + + private boolean needType; + + /** + * This property will be used in ID definition. + * If this is true, id will be in the form docType:EpoID, otherwise EpoID will be returned + * + * @param needType if true, docType will be included in id definition + */ + public void setNeedType(boolean needType) { + this.needType = needType; + } + + /** + * Return prefixToNamespaceMapping + * + * @return a prefixToNamespaceMapping map + */ + public Map getPrefixToNamespaceMapping() { + return prefixToNamespaceMapping; + } + + protected MetadataFieldMapping> metadataFieldMapping; + + /** + * Return metadataFieldMapping + * + * @return MetadataFieldMapping + */ + public MetadataFieldMapping> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + /** + * Set the metadataFieldMapping of this SimpleXpathMetadatumContributor + * + * @param metadataFieldMapping the new mapping. + */ + public void setMetadataFieldMapping( + MetadataFieldMapping> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + + /** + * Set the prefixToNamespaceMapping for this object, + * + * @param prefixToNamespaceMapping the new mapping. 
+ */ + @Resource(name = "isiFullprefixMapping") + public void setPrefixToNamespaceMapping(Map prefixToNamespaceMapping) { + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + } + + protected Map prefixToNamespaceMapping; + + /** + * Initialize EpoIdMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query query string + * @param prefixToNamespaceMapping metadata prefix to namespace mapping + * @param field + * MetadataFieldConfig + */ + public EpoIdMetadataContributor(String query, Map prefixToNamespaceMapping, + MetadataFieldConfig field) { + this.query = query; + this.prefixToNamespaceMapping = prefixToNamespaceMapping; + this.field = field; + } + + /** + * Empty constructor for EpoIdMetadataContributor + */ + public EpoIdMetadataContributor() { + + } + + protected String query; + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + @Required + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create an xpathExpression on, this query is used to + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + @Required + public void setQuery(String query) { + this.query = query; + } + + /** + * Retrieve the metadata associated with the given object. + * Depending on the retrieved node (using the query), different types of values will be added to the MetadatumDTO + * list + * + * @param t A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. 
+ */ + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + try { + List namespaces = Arrays.asList( + Namespace.getNamespace("xlink", "http://www.w3.org/1999/xlink"), + Namespace.getNamespace("ops", "http://ops.epo.org"), + Namespace.getNamespace("ns", "http://www.epo.org/exchange")); + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.element(), null, + namespaces); + List elements = xpath.evaluate(element); + for (Element el : elements) { + EpoDocumentId document = new EpoDocumentId(el, namespaces); + MetadatumDTO metadatum = new MetadatumDTO(); + metadatum.setElement(field.getElement()); + metadatum.setQualifier(field.getQualifier()); + metadatum.setSchema(field.getSchema()); + if (needType) { + metadatum.setValue(document.getIdAndType()); + } else { + metadatum.setValue(document.getId()); + } + values.add(metadatum); + } + return values; + } catch (JaxenException e) { + System.err.println(query); + throw new RuntimeException(e); + } + } + + /** + * This class maps EPO's response metadata needs to extract epo ID. 
+ * + * @author Pasquale Cavallo + * + */ + public static class EpoDocumentId { + + private String documentIdType; + private String country; + private String docNumber; + private String kind; + private String date; + private List namespaces; + + + public static final String DOCDB = "docdb"; + public static final String EPODOC = "epodoc"; + public static final String ORIGIN = "origin"; + + + public EpoDocumentId(Element documentId, List namespaces) throws JaxenException { + this.namespaces = namespaces; + Element preferredId = null; + XPathExpression xpath = XPathFactory.instance().compile( + "./ns:document-id[@document-id-type=\"epodoc\"]", Filters.fpassthrough(), null, namespaces); + + List nodes = xpath.evaluate(documentId); + if (CollectionUtils.isNotEmpty(nodes)) { + preferredId = (Element) nodes.get(0); + } + if (Objects.isNull(preferredId)) { + preferredId = documentId; + } + + this.documentIdType = buildDocumentIdType(preferredId); + this.country = buildCountry(preferredId); + this.docNumber = buildDocNumber(preferredId); + this.kind = buildKind(preferredId); + this.date = buildDate(preferredId); + } + + private String buildDocumentIdType(Element documentId) throws JaxenException { + return getElement(documentId, "./@document-id-type"); + } + + private String buildCountry(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:country"); + } + + private String buildDocNumber(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:doc-number"); + } + + private String buildKind(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:kind"); + } + + private String buildDate(Element documentId) throws JaxenException { + return getElement(documentId, "./ns:date"); + } + + + public String getDocumentIdType() { + return documentIdType; + } + + /** + * This method compute the epo ID from fields + * + * @return the EPO id + */ + public String getId() { + if (DOCDB.equals(documentIdType)) { 
+ return country + "." + docNumber + "." + kind; + } else if (EPODOC.equals(documentIdType)) { + return docNumber + ((kind != null) ? kind : StringUtils.EMPTY); + } else { + return StringUtils.EMPTY; + } + } + + public String getIdAndType() { + if (EPODOC.equals(documentIdType)) { + return documentIdType + ":" + docNumber + ((kind != null) ? kind : ""); + } else if (DOCDB.equals(documentIdType)) { + return documentIdType + ":" + country + "." + docNumber + "." + kind; + } else { + return StringUtils.EMPTY; + } + } + + + private String getElement(Element documentId, String path) throws JaxenException { + if (Objects.isNull(documentId)) { + return StringUtils.EMPTY; + } + XPathExpression xpath = XPathFactory.instance().compile(path, Filters.fpassthrough(), null, + namespaces); + List nodes = xpath.evaluate(documentId); + //exactly one element expected for any field + return CollectionUtils.isNotEmpty(nodes) ? getValue(nodes.get(0)) : StringUtils.EMPTY; + } + + private String getValue(Object el) { + if (el instanceof Element) { + return ((Element) el).getText(); + } else if (el instanceof Attribute) { + return ((Attribute) el).getValue(); + } else if (el instanceof String) { + return (String)el; + } else if (el instanceof Text) { + return ((Text) el).getText(); + } else { + return StringUtils.EMPTY; + } + } + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java new file mode 100644 index 0000000000..2de0c6a0bb --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/JsonPathMetadataProcessor.java @@ -0,0 +1,23 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; + +/** + * Service interface class for processing json object. + * The implementation of this class is responsible for all business logic calls + * for extracting of values from json object. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public interface JsonPathMetadataProcessor { + + public Collection processMetadata(String json); + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java new file mode 100644 index 0000000000..c8e93971f4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/MatrixElementProcessor.java @@ -0,0 +1,87 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * This Processor allows to extract all values of a matrix. 
+ * Only need to configure the path to the matrix in "pathToMatrix" + * For exaple to extract all values + * "matrix": [ + * [ + * "first", + * "second" + * ], + * [ + * "third" + * ], + * [ + * "fourth", + * "fifth" + * ] + * ], + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class MatrixElementProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToMatrix; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator array = rootNode.at(pathToMatrix).elements(); + Collection values = new ArrayList<>(); + while (array.hasNext()) { + JsonNode element = array.next(); + if (element.isArray()) { + Iterator nodes = element.iterator(); + while (nodes.hasNext()) { + String nodeValue = nodes.next().textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } else { + String nodeValue = element.textValue(); + if (StringUtils.isNotBlank(nodeValue)) { + values.add(nodeValue); + } + } + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToMatrix(String pathToMatrix) { + this.pathToMatrix = pathToMatrix; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java new file mode 100644 index 0000000000..f739980220 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleJsonPathMetadataContributor.java @@ -0,0 
+1,181 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.Objects; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * A simple JsonPath Metadata processor + * that allow extract value from json object + * by configuring the path in the query variable via the bean. 
+ * moreover this can also perform more compact extractions + * by configuring specific json processor in "metadataProcessor" + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class SimpleJsonPathMetadataContributor implements MetadataContributor { + + private final static Logger log = LogManager.getLogger(); + + private String query; + + private MetadataFieldConfig field; + + protected JsonPathMetadataProcessor metadataProcessor; + + /** + * Initialize SimpleJsonPathMetadataContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig + * + * @param query The JSonPath query + * @param field the matadata field to map the result of the Json path query + * MetadataFieldConfig + */ + public SimpleJsonPathMetadataContributor(String query, MetadataFieldConfig field) { + this.query = query; + this.field = field; + } + + + /** + * Unused by this implementation + */ + @Override + public void setMetadataFieldMapping(MetadataFieldMapping> rt) { + + } + + /** + * Empty constructor for SimpleJsonPathMetadataContributor + */ + public SimpleJsonPathMetadataContributor() { + + } + + /** + * Return the MetadataFieldConfig used while retrieving MetadatumDTO + * + * @return MetadataFieldConfig + */ + public MetadataFieldConfig getField() { + return field; + } + + /** + * Setting the MetadataFieldConfig + * + * @param field MetadataFieldConfig used while retrieving MetadatumDTO + */ + public void setField(MetadataFieldConfig field) { + this.field = field; + } + + /** + * Return query used to create the JSonPath + * + * @return the query this instance is based on + */ + public String getQuery() { + return query; + } + + /** + * Return query used to create the JSonPath + * + */ + public void setQuery(String query) { + this.query = query; + } + + /** + * Used to process data got by jsonpath expression, like arrays to stringify, change date format or else + * If it is null, toString will be used. 
+ * + * @param metadataProcessor + */ + public void setMetadataProcessor(JsonPathMetadataProcessor metadataProcessor) { + this.metadataProcessor = metadataProcessor; + } + + /** + * Retrieve the metadata associated with the given object. + * The toString() of the resulting object will be used. + * + * @param t A class to retrieve metadata from. + * @return a collection of import records. Only the identifier of the found records may be put in the record. + */ + @Override + public Collection contributeMetadata(String fullJson) { + Collection metadata = new ArrayList<>(); + Collection metadataValue = new ArrayList<>(); + if (Objects.nonNull(metadataProcessor)) { + metadataValue = metadataProcessor.processMetadata(fullJson); + } else { + JsonNode jsonNode = convertStringJsonToJsonNode(fullJson); + JsonNode node = jsonNode.at(query); + if (node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + String nodeValue = getStringValue(nodes.next()); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } else if (!node.isNull() && StringUtils.isNotBlank(node.toString())) { + String nodeValue = getStringValue(node); + if (StringUtils.isNotBlank(nodeValue)) { + metadataValue.add(nodeValue); + } + } + } + for (String value : metadataValue) { + MetadatumDTO metadatumDto = new MetadatumDTO(); + metadatumDto.setValue(value); + metadatumDto.setElement(field.getElement()); + metadatumDto.setQualifier(field.getQualifier()); + metadatumDto.setSchema(field.getSchema()); + metadata.add(metadatumDto); + } + return metadata; + } + + private String getStringValue(JsonNode node) { + if (node.isTextual()) { + return node.textValue(); + } + if (node.isNumber()) { + return node.numberValue().toString(); + } + log.error("It wasn't possible to convert the value of the following JsonNode:" + node.asText()); + return StringUtils.EMPTY; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new 
ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java new file mode 100644 index 0000000000..5dd354c6f1 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataConcatContributor.java @@ -0,0 +1,59 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * This contributor extends SimpleRisToMetadataContributor, + * in particular, this one is able to chain multi values into a single one + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataConcatContributor extends SimpleRisToMetadataContributor { + + private String tag; + + private MetadataFieldConfig metadata; + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + List fieldValues = record.get(this.tag); + Optional.ofNullable(fieldValues) + .map(fv -> fv.stream()) + .map(s -> s.collect(Collectors.joining(" "))) + .ifPresent(t -> 
values.add(this.metadataFieldMapping.toDCValue(this.metadata, t))); + return values; + } + + public String getTag() { + return tag; + } + + public void setTag(String tag) { + this.tag = tag; + } + + public MetadataFieldConfig getMetadata() { + return metadata; + } + + public void setMetadata(MetadataFieldConfig metadata) { + this.metadata = metadata; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java new file mode 100644 index 0000000000..36ea0dd478 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleRisToMetadataContributor.java @@ -0,0 +1,71 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * Metadata contributor that takes a record defined as Map> + * and turns it into metadatums configured in fieldToMetadata + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.it) + */ +public class SimpleRisToMetadataContributor implements MetadataContributor>> { + + protected Map fieldToMetadata; + + protected MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping; + + public SimpleRisToMetadataContributor() {} + + public SimpleRisToMetadataContributor(Map 
fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + @Override + public Collection contributeMetadata(Map> record) { + List values = new LinkedList<>(); + for (String field : fieldToMetadata.keySet()) { + List fieldValues = record.get(field); + if (Objects.nonNull(fieldValues)) { + for (String value : fieldValues) { + values.add(metadataFieldMapping.toDCValue(fieldToMetadata.get(field), value)); + } + } + } + return values; + } + + public Map getFieldToMetadata() { + return fieldToMetadata; + } + + public void setFieldToMetadata(Map fieldToMetadata) { + this.fieldToMetadata = fieldToMetadata; + } + + public MetadataFieldMapping>, + MetadataContributor>>> getMetadataFieldMapping() { + return metadataFieldMapping; + } + + public void setMetadataFieldMapping(MetadataFieldMapping>, + MetadataContributor>>> metadataFieldMapping) { + this.metadataFieldMapping = metadataFieldMapping; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java new file mode 100644 index 0000000000..fb15cd60ab --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathDateFormatMetadataContributor.java @@ -0,0 +1,91 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.metadatamapping.contributor; + +import java.text.DateFormat; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; + +import 
org.dspace.importer.external.metadatamapping.MetadataFieldConfig; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.jdom2.Attribute; +import org.jdom2.Element; +import org.jdom2.Namespace; +import org.jdom2.Text; +import org.jdom2.filter.Filters; +import org.jdom2.xpath.XPathExpression; +import org.jdom2.xpath.XPathFactory; + +/** + * This contributor can be used when parsing an XML file, + * particularly to extract a date and convert it to a specific format. + * In the variable dateFormatFrom the read format should be configured, + * instead in the variable dateFormatTo the format you want to obtain. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SimpleXpathDateFormatMetadataContributor extends SimpleXpathMetadatumContributor { + + private DateFormat dateFormatFrom; + private DateFormat dateFormatTo; + + public void setDateFormatFrom(String dateFormatFrom) { + this.dateFormatFrom = new SimpleDateFormat(dateFormatFrom); + } + + public void setDateFormatTo(String dateFormatTo) { + this.dateFormatTo = new SimpleDateFormat(dateFormatTo); + } + + @Override + public Collection contributeMetadata(Element element) { + List values = new LinkedList<>(); + List namespaces = new ArrayList(); + for (String ns : prefixToNamespaceMapping.keySet()) { + namespaces.add(Namespace.getNamespace(prefixToNamespaceMapping.get(ns), ns)); + } + XPathExpression xpath = XPathFactory.instance() + .compile(query,Filters.fpassthrough(), null, namespaces); + List nodes = xpath.evaluate(element); + for (Object el : nodes) { + if (el instanceof Element) { + values.add(getMetadatum(field, ((Element) el).getText())); + } else if (el instanceof Attribute) { + values.add(getMetadatum(field, ((Attribute) el).getValue())); + } else if (el instanceof String) { + values.add(getMetadatum(field, (String) el)); + } else if (el instanceof Text) { + values.add(metadataFieldMapping.toDCValue(field, ((Text) el).getText())); + } else { + 
System.err.println("node of type: " + el.getClass()); + } + } + return values; + } + + private MetadatumDTO getMetadatum(MetadataFieldConfig field, String value) { + MetadatumDTO dcValue = new MetadatumDTO(); + if (field == null) { + return null; + } + try { + dcValue.setValue(dateFormatTo.format(dateFormatFrom.parse(value))); + } catch (ParseException e) { + dcValue.setValue(value); + } + dcValue.setElement(field.getElement()); + dcValue.setQualifier(field.getQualifier()); + dcValue.setSchema(field.getSchema()); + return dcValue; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java index 65d6d66947..982fc59af9 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/metadatamapping/contributor/SimpleXpathMetadatumContributor.java @@ -33,10 +33,10 @@ import org.springframework.beans.factory.annotation.Autowired; * @author Roeland Dillen (roeland at atmire dot com) */ public class SimpleXpathMetadatumContributor implements MetadataContributor { - private MetadataFieldConfig field; - private static final Logger log - = org.apache.logging.log4j.LogManager.getLogger(); + protected MetadataFieldConfig field; + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(); /** * Return prefixToNamespaceMapping @@ -47,7 +47,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor> metadataFieldMapping; + protected MetadataFieldMapping> metadataFieldMapping; /** * Return metadataFieldMapping @@ -79,7 +79,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor prefixToNamespaceMapping; + protected Map prefixToNamespaceMapping; /** * 
Initialize SimpleXpathMetadatumContributor with a query, prefixToNamespaceMapping and MetadataFieldConfig @@ -103,7 +103,7 @@ public class SimpleXpathMetadatumContributor implements MetadataContributor xpath = - XPathFactory.instance().compile(query, Filters.fpassthrough(), null, namespaces); - + XPathExpression xpath = XPathFactory.instance().compile(query, Filters.fpassthrough(), null,namespaces); List nodes = xpath.evaluate(t); for (Object el : nodes) { if (el instanceof Element) { diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java new file mode 100644 index 0000000000..0d7183a1f0 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloFieldMapping.java @@ -0,0 +1,37 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the Scielo metadatum fields on the DSpace metadatum fields + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4science dot it) + */ +@SuppressWarnings("rawtypes") +public class ScieloFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + * what metadatafield is generated. 
+ * + * @param metadataFieldMap The map containing the link between retrieve metadata and + * metadata that will be set to the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "scieloMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..4f83ffe978 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/scielo/service/ScieloImportMetadataSourceServiceImpl.java @@ -0,0 +1,263 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.scielo.service; + +import java.io.BufferedReader; +import java.io.StringReader; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.el.MethodNotFoundException; +import javax.ws.rs.BadRequestException; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.http.client.utils.URIBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.FileSourceException; +import org.dspace.importer.external.exception.MetadataSourceException; +import 
org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying Scielo + * + * @author Boychuk Mykhaylo (boychuk.mykhaylo at 4Science dot it) + */ +public class ScieloImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService>> + implements QuerySource { + + /** + * This pattern is used when reading the Scielo response, + * to check if the fields you are reading is in rid format + */ + private static final String PATTERN = "^([A-Z][A-Z0-9]) - (.*)$"; + + /** + * This pattern is used to verify correct format of ScieloId + */ + private static final String ID_PATTERN = "^(.....)-(.*)-(...)$"; + + private int timeout = 1000; + + private String url; + + @Autowired + private LiveImportClient liveImportClient; + + @Override + public void init() throws Exception {} + + @Override + public String getImportSource() { + return "scielo"; + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query, count, start)); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + return retry(new SearchByQueryCallable(query)); + } + + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + List records = retry(new SearchByQueryCallable(query)); + return CollectionUtils.isEmpty(records) ? null : records.get(0); + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + List records = retry(new FindByIdCallable(id)); + return CollectionUtils.isEmpty(records) ? 
null : records.get(0); + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new SearchNBByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for Scielo"); + } + + /** + * This class is a Callable implementation to count the number of entries for an Scielo query + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchNBByQueryCallable implements Callable { + + private String query; + + private SearchNBByQueryCallable(String queryString) { + this.query = queryString; + } + + private SearchNBByQueryCallable(Query query) { + this.query = query.getParameterAsClass("query", String.class); + } + + @Override + public Integer call() throws Exception { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(query, StandardCharsets.UTF_8)); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + return Objects.nonNull(records.size()) ? records.size() : 0; + } + } + + /** + * This class is a Callable implementation to get an Scielo entry using ScieloID + * The ScieloID to use can be passed through the constructor as a String + * or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class FindByIdCallable implements Callable> { + + private String id; + + private FindByIdCallable(String id) { + this.id = id; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String scieloId = id.trim(); + Pattern risPattern = Pattern.compile(ID_PATTERN); + Matcher risMatcher = risPattern.matcher(scieloId); + if (risMatcher.matches()) { + Map> params = new HashMap>(); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(scieloId, StandardCharsets.UTF_8)); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + if (Objects.nonNull(records) & !records.isEmpty()) { + results.add(transformSourceRecords(records.get(1))); + } + } else { + throw new BadRequestException("id provided : " + scieloId + " is not an ScieloID"); + } + return results; + } + } + + /** + * This class is a Callable implementation to get Scielo entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable> { + + private Query query; + + private SearchByQueryCallable(String queryString, Integer maxResult, Integer start) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("start", start); + query.addParameter("count", maxResult); + } + + private SearchByQueryCallable(Query query) { + this.query = query; + } + + @Override + public List call() throws Exception { + List results = new ArrayList<>(); + String q = query.getParameterAsClass("query", String.class); + Integer count = query.getParameterAsClass("count", Integer.class); + Integer start = query.getParameterAsClass("start", Integer.class); + URIBuilder uriBuilder = new URIBuilder(url + URLEncoder.encode(q, StandardCharsets.UTF_8)); + uriBuilder.addParameter("start", start.toString()); + uriBuilder.addParameter("count", count.toString()); + Map> params = new HashMap>(); + String resp = liveImportClient.executeHttpGetRequest(timeout, uriBuilder.toString(), params); + Map>> records = getRecords(resp); + for (int record : records.keySet()) { + results.add(transformSourceRecords(records.get(record))); + } + return results; + } + } + + private Map>> getRecords(String resp) throws FileSourceException { + Map>> records = new HashMap>>(); + BufferedReader reader; + int countRecord = 0; + try { + reader = new BufferedReader(new StringReader(resp)); + String line; + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.equals("") || line.matches("^\\s*$")) { + continue; + } + line = line.replaceAll("\\uFEFF", "").trim(); + Pattern risPattern = Pattern.compile(PATTERN); + Matcher risMatcher = risPattern.matcher(line); + if (risMatcher.matches()) { + if (risMatcher.group(1).equals("TY") & risMatcher.group(2).equals("JOUR")) { + countRecord ++; + Map> newMap = new HashMap>(); + records.put(countRecord, newMap); + } else { + Map> tag2values = 
records.get(countRecord); + List values = tag2values.get(risMatcher.group(1)); + if (Objects.isNull(values)) { + List newValues = new ArrayList(); + newValues.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), newValues); + } else { + values.add(risMatcher.group(2)); + tag2values.put(risMatcher.group(1), values); + } + } + } + } + } catch (Exception e) { + throw new FileSourceException("Cannot parse RIS file", e); + } + return records; + } + + public void setUrl(String url) { + this.url = url; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java new file mode 100644 index 0000000000..95d42e3a27 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/service/DoiCheck.java @@ -0,0 +1,47 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.service; + +import java.util.Arrays; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Utility class that provides methods to check if a given string is a DOI + * + * @author Corrado Lombardi (corrado.lombardi at 4science.it) + */ +public class DoiCheck { + + private static final List DOI_PREFIXES = Arrays.asList("http://dx.doi.org/", "https://dx.doi.org/"); + + private static final Pattern PATTERN = Pattern.compile("10.\\d{4,9}/[-._;()/:A-Z0-9]+" + + "|10.1002/[^\\s]+" + + "|10.\\d{4}/\\d+-\\d+X?(\\d+)" + + "\\d+<[\\d\\w]+:[\\d\\w]*>\\d+.\\d+.\\w+;\\d" + + "|10.1021/\\w\\w\\d++" + + "|10.1207/[\\w\\d]+\\&\\d+_\\d+", + Pattern.CASE_INSENSITIVE); + + private DoiCheck() {} + + public static boolean isDoi(final String value) { + Matcher m = PATTERN.matcher(purgeDoiValue(value)); + return 
m.matches(); + } + + public static String purgeDoiValue(final String query) { + String value = query.replaceAll(",", ""); + for (final String prefix : DOI_PREFIXES) { + value = value.replaceAll(prefix, ""); + } + return value.trim(); + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java new file mode 100644 index 0000000000..a4f90fa5ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/VuFindImportMetadataSourceServiceImpl.java @@ -0,0 +1,339 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; +import javax.el.MethodNotFoundException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.utils.URIBuilder; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.exception.MetadataSourceException; +import org.dspace.importer.external.liveimportclient.service.LiveImportClient; +import org.dspace.importer.external.service.AbstractImportMetadataSourceService; +import 
org.dspace.importer.external.service.components.QuerySource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Implements a data source for querying VuFind + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class VuFindImportMetadataSourceServiceImpl extends AbstractImportMetadataSourceService + implements QuerySource { + + private final static Logger log = LogManager.getLogger(); + + private String url; + private String urlSearch; + + private String fields; + + @Autowired + private LiveImportClient liveImportClient; + + public VuFindImportMetadataSourceServiceImpl(String fields) { + this.fields = fields; + } + + @Override + public String getImportSource() { + return "VuFind"; + } + + @Override + public ImportRecord getRecord(String id) throws MetadataSourceException { + String records = retry(new GetByVuFindIdCallable(id, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? 
importRecords.get(0) : null; + } + + @Override + public int getRecordsCount(String query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public int getRecordsCount(Query query) throws MetadataSourceException { + return retry(new CountByQueryCallable(query)); + } + + @Override + public Collection getRecords(String query, int start, int count) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, count, start, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection getRecords(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + return extractMetadataFromRecordList(records); + } + + @Override + public ImportRecord getRecord(Query query) throws MetadataSourceException { + String records = retry(new SearchByQueryCallable(query, fields)); + List importRecords = extractMetadataFromRecordList(records); + return importRecords != null && !importRecords.isEmpty() ? importRecords.get(0) : null; + } + + @Override + public Collection findMatchingRecords(Query query) throws MetadataSourceException { + String records = retry(new FindMatchingRecordsCallable(query)); + return extractMetadataFromRecordList(records); + } + + @Override + public Collection findMatchingRecords(Item item) throws MetadataSourceException { + throw new MethodNotFoundException("This method is not implemented for VuFind"); + } + + @Override + public void init() throws Exception {} + + /** + * This class is a Callable implementation to count the number of entries for an VuFind query. + * This Callable use as query value to CrossRef the string queryString passed to constructor. + * If the object will be construct through Query.class instance, the value of the Query's + * map with the key "query" will be used. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class CountByQueryCallable implements Callable { + + private Query query; + + public CountByQueryCallable(String queryString) { + query = new Query(); + query.addParameter("query", queryString); + } + + public CountByQueryCallable(Query query) { + this.query = query; + } + + @Override + public Integer call() throws Exception { + Integer start = 0; + Integer count = 1; + int page = start / count + 1; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + uriBuilder.addParameter("page", String.valueOf(page)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + Map> params = new HashMap>(); + String responseString = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + JsonNode node = convertStringJsonToJsonNode(responseString); + JsonNode resultCountNode = node.get("resultCount"); + return resultCountNode.intValue(); + } + } + + /** + * This class is a Callable implementation to get an VuFind entry using VuFind id + * The id to use can be passed through the constructor as a String or as Query's map entry, with the key "id". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class GetByVuFindIdCallable implements Callable { + + private String id; + + private String fields; + + public GetByVuFindIdCallable(String id, String fields) { + this.id = id; + if (fields != null && fields.length() > 0) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("id", id); + uriBuilder.addParameter("prettyPrint", "false"); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + String response = liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + return response; + } + } + + /** + * This class is a Callable implementation to get VuFind entries based on query object. + * This Callable use as query value the string queryString passed to constructor. + * If the object will be construct through Query.class instance, a Query's map entry with key "query" will be used. + * Pagination is supported too, using the value of the Query's map with keys "start" and "count". 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + private class SearchByQueryCallable implements Callable { + + private Query query; + + private String fields; + + public SearchByQueryCallable(String queryString, Integer maxResult, Integer start, String fields) { + query = new Query(); + query.addParameter("query", queryString); + query.addParameter("count", maxResult); + query.addParameter("start", start); + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + public SearchByQueryCallable(Query query, String fields) { + this.query = query; + if (StringUtils.isNotBlank(fields)) { + this.fields = fields; + } else { + this.fields = null; + } + } + + @Override + public String call() throws Exception { + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(urlSearch); + uriBuilder.addParameter("type", "AllField"); + //page looks 1 based (start = 0, count = 20 -> page = 0) + uriBuilder.addParameter("page", String.valueOf(page + 1)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", String.valueOf(true)); + uriBuilder.addParameter("lookfor", query.getParameterAsClass("query", String.class)); + if (StringUtils.isNotBlank(fields)) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + /** + * This class is a Callable implementation to search VuFind entries using author and title. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ + public class FindMatchingRecordsCallable implements Callable { + + private Query query; + + private String fields; + + public FindMatchingRecordsCallable(Query query) { + this.query = query; + } + + @Override + public String call() throws Exception { + String author = query.getParameterAsClass("author", String.class); + String title = query.getParameterAsClass("title", String.class); + Integer start = query.getParameterAsClass("start", Integer.class); + Integer count = query.getParameterAsClass("count", Integer.class); + int page = count != 0 ? start / count : 0; + URIBuilder uriBuilder = new URIBuilder(url); + uriBuilder.addParameter("type", "AllField"); + //pagination is 1 based (first page: start = 0, count = 20 -> page = 0 -> +1 = 1) + uriBuilder.addParameter("page", String.valueOf(page ++)); + uriBuilder.addParameter("limit", count.toString()); + uriBuilder.addParameter("prettyPrint", "true"); + if (fields != null && !fields.isEmpty()) { + for (String field : fields.split(",")) { + uriBuilder.addParameter("field[]", field); + } + } + String filter = StringUtils.EMPTY; + if (StringUtils.isNotBlank(author)) { + filter = "author:" + author; + } + if (StringUtils.isNotBlank(title)) { + if (StringUtils.isNotBlank(filter)) { + filter = filter + " AND title:" + title; + } else { + filter = "title:" + title; + } + } + uriBuilder.addParameter("lookfor", filter); + Map> params = new HashMap>(); + return liveImportClient.executeHttpGetRequest(1000, uriBuilder.toString(), params); + } + + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + private List extractMetadataFromRecordList(String records) { + List recordsResult = new ArrayList<>(); + JsonNode jsonNode = 
convertStringJsonToJsonNode(records); + JsonNode node = jsonNode.get("records"); + if (Objects.nonNull(node) && node.isArray()) { + Iterator nodes = node.iterator(); + while (nodes.hasNext()) { + recordsResult.add(transformSourceRecords(nodes.next().toString())); + } + } + return recordsResult; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getUrlSearch() { + return urlSearch; + } + + public void setUrlSearch(String urlSearch) { + this.urlSearch = urlSearch; + } + +} \ No newline at end of file diff --git a/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java new file mode 100644 index 0000000000..b14927a14c --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/vufind/metadatamapping/VuFindFieldMapping.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.vufind.metadatamapping; + +import java.util.Map; +import javax.annotation.Resource; + +import org.dspace.importer.external.metadatamapping.AbstractMetadataFieldMapping; + +/** + * An implementation of {@link AbstractMetadataFieldMapping} + * Responsible for defining the mapping of the VuFind metadatum fields on the DSpace metadatum fields + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@SuppressWarnings("rawtypes") +public class VuFindFieldMapping extends AbstractMetadataFieldMapping { + + /** + * Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + * only matters here for postprocessing of the value. 
The mapped MetadatumContributor has full control over + * what metadatafield is generated. + * + * @param metadataFieldMap The map containing the link between retrieve metadata and metadata that will be set to + * the item. + */ + @Override + @SuppressWarnings("unchecked") + @Resource(name = "vufindMetadataFieldMap") + public void setMetadataFieldMap(Map metadataFieldMap) { + super.setMetadataFieldMap(metadataFieldMap); + } + +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java new file mode 100644 index 0000000000..7f8a11e5ba --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/statistics/GeoIpService.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.statistics; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; + +import com.maxmind.geoip2.DatabaseReader; +import org.apache.commons.lang3.StringUtils; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Service that handle the GeoIP database file. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GeoIpService { + + @Autowired + private ConfigurationService configurationService; + + /** + * Returns an instance of {@link DatabaseReader} based on the configured db + * file, if any. 
+ * + * @return the Database reader + * @throws IllegalStateException if the db file is not configured correctly + */ + public DatabaseReader getDatabaseReader() throws IllegalStateException { + String dbPath = configurationService.getProperty("usage-statistics.dbfile"); + if (StringUtils.isBlank(dbPath)) { + throw new IllegalStateException("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + } + + try { + File dbFile = new File(dbPath); + return new DatabaseReader.Builder(dbFile).build(); + } catch (FileNotFoundException fe) { + throw new IllegalStateException( + "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + + "based reports! Please see the DSpace installation instructions for instructions to install " + + "this file.",fe); + } catch (IOException e) { + throw new IllegalStateException( + "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + + "DSpace installation instructions for more details.", e); + } + } +} diff --git a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java index 9cc032a998..4b2ae94e75 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java @@ -8,7 +8,6 @@ package org.dspace.statistics; import java.io.File; -import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; @@ -142,6 +141,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea private ClientInfoService clientInfoService; @Autowired private SolrStatisticsCore solrStatisticsCore; + @Autowired + private GeoIpService geoIpService; /** URL to the current-year statistics core. Prior-year shards will have a year suffixed. 
*/ private String statisticsCoreURL; @@ -179,26 +180,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea //spiderIps = SpiderDetector.getSpiderIpAddresses(); DatabaseReader service = null; - // Get the db file for the location - String dbPath = configurationService.getProperty("usage-statistics.dbfile"); - if (dbPath != null) { - try { - File dbFile = new File(dbPath); - service = new DatabaseReader.Builder(dbFile).build(); - } catch (FileNotFoundException fe) { - log.error( - "The GeoLite Database file is missing (" + dbPath + ")! Solr Statistics cannot generate location " + - "based reports! Please see the DSpace installation instructions for instructions to install " + - "this file.", - fe); - } catch (IOException e) { - log.error( - "Unable to load GeoLite Database file (" + dbPath + ")! You may need to reinstall it. See the " + - "DSpace installation instructions for more details.", - e); - } - } else { - log.error("The required 'dbfile' configuration is missing in solr-statistics.cfg!"); + try { + service = geoIpService.getDatabaseReader(); + } catch (IllegalStateException ex) { + log.error(ex); } locationService = service; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 8835e03104..1a690afd86 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -406,6 +406,12 @@ public class DatabaseUtils { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); + if (dbType.equals(DBMS_ORACLE)) { + System.out.println("===================================="); + System.out.println("WARNING: Oracle support is deprecated!"); + System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); + 
System.out.println("====================================="); + } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); System.out.println("Database Username: " + meta.getUserName()); @@ -539,6 +545,10 @@ public class DatabaseUtils { String dbType = getDbType(connection); connection.close(); + if (dbType.equals(DBMS_ORACLE)) { + log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); + } + // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index 0583fb493c..b537819c06 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -119,3 +119,4 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused workflow group {1}. Delete the tasks and group first if you want to remove this user. org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided +org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md index 229b70ec37..6cef123859 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md @@ -1,5 +1,10 @@ # Oracle Flyway Database Migrations (i.e. Upgrades) +--- +WARNING: Oracle Support is deprecated. 
+See https://github.com/DSpace/DSpace/issues/8214 +--- + The SQL scripts in this directory are Oracle-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml index 5e69ee9c42..fcc8a3546c 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-import-services.xml @@ -115,6 +115,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml deleted file mode 100644 index c8197970a9..0000000000 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-configuration-services.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - dc.identifier.issn - - - - - - - - - diff --git a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml index 6fe8ddb07b..76891d169c 100644 --- a/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml +++ b/dspace-api/src/main/resources/spring/spring-dspace-addon-sherpa-services.xml @@ -25,10 +25,38 @@ - + + + + + + + + dc.identifier.issn + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index a8165dd5d4..f40298db30 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ 
-21,6 +21,7 @@ + @@ -82,6 +83,11 @@ submission-form + + org.dspace.app.rest.submit.step.DescribeStep + submission-form + + submit.progressbar.accessCondition org.dspace.app.rest.submit.step.AccessConditionStep @@ -131,6 +137,12 @@ org.dspace.app.rest.submit.step.DescribeStep submission-form + + + submit.progressbar.sherpapolicy + org.dspace.app.rest.submit.step.SherpaPolicyStep + sherpaPolicy + @@ -166,6 +178,7 @@ + @@ -191,6 +204,10 @@ + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/local.cfg b/dspace-api/src/test/data/dspaceFolder/config/local.cfg index 3c19a68e9f..93e11b059c 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/local.cfg +++ b/dspace-api/src/test/data/dspaceFolder/config/local.cfg @@ -144,3 +144,14 @@ authentication-ip.Student = 6.6.6.6 useProxies = true proxies.trusted.ipranges = 7.7.7.7 proxies.trusted.include_ui_ip = true + +# For the tests we have to disable this health indicator because there isn't a mock server and the calculated status was DOWN +management.health.solrOai.enabled = false + +researcher-profile.entity-type = Person + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +choices.plugin.dspace.object.owner = EPersonAuthority +choices.presentation.dspace.object.owner = suggest +authority.controlled.dspace.object.owner = true \ No newline at end of file diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml index ac163d3581..332811a725 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/external-services.xml @@ -6,6 +6,8 @@ + + @@ -13,13 +15,7 @@ - - - - - - - + Journal @@ -28,13 +24,7 @@ - - - - - - - + Journal @@ -43,13 +33,7 @@ - - - - - - - + OrgUnit diff --git 
a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml index fb9e31b9a0..206326f3db 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/sherpa.xml @@ -33,4 +33,18 @@ + + + + + + + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml index 5f86c73598..32ab90b2cc 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/spring/api/solr-services.xml @@ -47,5 +47,7 @@ + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml b/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml deleted file mode 100644 index adb2340f10..0000000000 --- a/dspace-api/src/test/data/dspaceFolder/config/spring/api/spring-dspace-addon-sherpa-services.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 67946788b5..7438fda852 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -140,6 +140,7 @@ ispartofseries true + Technical Report series Enter the series and number assigned to this item by your community. @@ -302,6 +303,75 @@ it, please enter the types and the actual numbers or codes. +
+ + + dc + title + + false + + onebox + Enter the main title of the item. + You must enter a main title for this item. + + + + + + dc + date + issued + false + + + date + Please give the date of previous publication or public distribution. + You can leave out the day and/or month if they aren't + applicable. + You must enter at least the year. + + + + + dc + type + + true + + dropdown + Select the type(s) of content of the item. To select more than one value in the list, you may have to hold down the "CTRL" or "Shift" key. + + + + + + + dc + identifier + isbn + true + + Book + onebox + Enter the ISBN of the book. + An ISBN is required. + + + + dc + identifier + isbn + true + + Book chapter + onebox + Enter the ISBN of the book in which this chapter appears. + + + +
+
diff --git a/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java new file mode 100644 index 0000000000..87127f9cf8 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/AccessStatusServiceTest.java @@ -0,0 +1,126 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.fail; + +import java.sql.SQLException; + +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +/** + * Unit Tests for access status service + */ +public class AccessStatusServiceTest extends AbstractUnitTest { + + private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStatusServiceTest.class); + + private Collection collection; + private Community owningCommunity; + private Item item; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + 
ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected AccessStatusService accessStatusService = + AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. + * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + item = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. 
+ * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, item); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + item = null; + collection = null; + owningCommunity = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + @Test + public void testGetAccessStatus() throws Exception { + String status = accessStatusService.getAccessStatus(context, item); + assertNotEquals("testGetAccessStatus 0", status, DefaultAccessStatusHelper.UNKNOWN); + } +} diff --git a/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java new file mode 100644 index 0000000000..a41e985deb --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/access/status/DefaultAccessStatusHelperTest.java @@ -0,0 +1,423 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.access.status; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.fail; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.AbstractUnitTest; +import 
org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.factory.AuthorizeServiceFactory; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.Bitstream; +import org.dspace.content.Bundle; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.BundleService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.InstallItemService; +import org.dspace.content.service.ItemService; +import org.dspace.content.service.WorkspaceItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.Group; +import org.dspace.eperson.factory.EPersonServiceFactory; +import org.dspace.eperson.service.GroupService; +import org.joda.time.LocalDate; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class DefaultAccessStatusHelperTest extends AbstractUnitTest { + + private static final Logger log = LogManager.getLogger(DefaultAccessStatusHelperTest.class); + + private Collection collection; + private Community owningCommunity; + private Item itemWithoutBundle; + private Item itemWithoutBitstream; + private Item itemWithBitstream; + private Item itemWithEmbargo; + private Item itemWithDateRestriction; + private Item itemWithGroupRestriction; + private Item itemWithoutPolicy; + private Item itemWithoutPrimaryBitstream; + private Item itemWithPrimaryAndMultipleBitstreams; + private Item itemWithoutPrimaryAndMultipleBitstreams; + private DefaultAccessStatusHelper helper; + private Date threshold; + + protected CommunityService communityService = + ContentServiceFactory.getInstance().getCommunityService(); + protected CollectionService collectionService = + 
ContentServiceFactory.getInstance().getCollectionService(); + protected ItemService itemService = + ContentServiceFactory.getInstance().getItemService(); + protected WorkspaceItemService workspaceItemService = + ContentServiceFactory.getInstance().getWorkspaceItemService(); + protected InstallItemService installItemService = + ContentServiceFactory.getInstance().getInstallItemService(); + protected BundleService bundleService = + ContentServiceFactory.getInstance().getBundleService(); + protected BitstreamService bitstreamService = + ContentServiceFactory.getInstance().getBitstreamService(); + protected ResourcePolicyService resourcePolicyService = + AuthorizeServiceFactory.getInstance().getResourcePolicyService(); + protected GroupService groupService = + EPersonServiceFactory.getInstance().getGroupService(); + + /** + * This method will be run before every test as per @Before. It will + * initialize resources required for the tests. + * + * Other methods can be annotated with @Before here or in subclasses + * but no execution order is guaranteed + */ + @Before + @Override + public void init() { + super.init(); + try { + context.turnOffAuthorisationSystem(); + owningCommunity = communityService.create(null, context); + collection = collectionService.create(context, owningCommunity); + itemWithoutBundle = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithEmbargo = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithDateRestriction = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithGroupRestriction = installItemService.installItem(context, + 
workspaceItemService.create(context, collection, true)); + itemWithoutPolicy = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryBitstream = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + itemWithoutPrimaryAndMultipleBitstreams = installItemService.installItem(context, + workspaceItemService.create(context, collection, true)); + context.restoreAuthSystemState(); + } catch (AuthorizeException ex) { + log.error("Authorization Error in init", ex); + fail("Authorization Error in init: " + ex.getMessage()); + } catch (SQLException ex) { + log.error("SQL Error in init", ex); + fail("SQL Error in init: " + ex.getMessage()); + } + helper = new DefaultAccessStatusHelper(); + threshold = new LocalDate(10000, 1, 1).toDate(); + } + + /** + * This method will be run after every test as per @After. It will + * clean resources initialized by the @Before methods. 
+ * + * Other methods can be annotated with @After here or in subclasses + * but no execution order is guaranteed + */ + @After + @Override + public void destroy() { + context.turnOffAuthorisationSystem(); + try { + itemService.delete(context, itemWithoutBundle); + itemService.delete(context, itemWithoutBitstream); + itemService.delete(context, itemWithBitstream); + itemService.delete(context, itemWithEmbargo); + itemService.delete(context, itemWithDateRestriction); + itemService.delete(context, itemWithGroupRestriction); + itemService.delete(context, itemWithoutPolicy); + itemService.delete(context, itemWithoutPrimaryBitstream); + itemService.delete(context, itemWithPrimaryAndMultipleBitstreams); + itemService.delete(context, itemWithoutPrimaryAndMultipleBitstreams); + } catch (Exception e) { + // ignore + } + try { + collectionService.delete(context, collection); + } catch (Exception e) { + // ignore + } + try { + communityService.delete(context, owningCommunity); + } catch (Exception e) { + // ignore + } + context.restoreAuthSystemState(); + itemWithoutBundle = null; + itemWithoutBitstream = null; + itemWithBitstream = null; + itemWithEmbargo = null; + itemWithDateRestriction = null; + itemWithGroupRestriction = null; + itemWithoutPolicy = null; + itemWithoutPrimaryBitstream = null; + itemWithPrimaryAndMultipleBitstreams = null; + itemWithoutPrimaryAndMultipleBitstreams = null; + collection = null; + owningCommunity = null; + helper = null; + threshold = null; + communityService = null; + collectionService = null; + itemService = null; + workspaceItemService = null; + installItemService = null; + bundleService = null; + bitstreamService = null; + resourcePolicyService = null; + groupService = null; + try { + super.destroy(); + } catch (Exception e) { + // ignore + } + } + + /** + * Test for a null item + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithNullItem() throws Exception { + String status = helper.getAccessStatusFromItem(context, null, threshold); + assertThat("testWithNullItem 0", status, equalTo(DefaultAccessStatusHelper.UNKNOWN)); + } + + /** + * Test for an item with no bundle + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBundle() throws Exception { + String status = helper.getAccessStatusFromItem(context, itemWithoutBundle, threshold); + assertThat("testWithoutBundle 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with no bitstream + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + bundleService.create(context, itemWithoutBitstream, Constants.CONTENT_BUNDLE_NAME); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutBitstream, threshold); + assertThat("testWithoutBitstream 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY)); + } + + /** + * Test for an item with a basic bitstream (open access) + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithBitstream, threshold); + assertThat("testWithBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an embargo + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithEmbargo() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithEmbargo, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold); + assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + } + + /** + * Test for an item with an anonymous date restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithDateRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithDateRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(10000, 1, 1).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithDateRestriction, threshold); + assertThat("testWithDateRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with a group restriction + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithGroupRestriction() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithGroupRestriction, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Restriction"); + Group group = groupService.findByName(context, Group.ADMIN); + policy.setGroup(group); + policy.setAction(Constants.READ); + policies.add(policy); + authorizeService.removeAllPolicies(context, bitstream); + authorizeService.addPolicies(context, policies, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithGroupRestriction, threshold); + assertThat("testWithGroupRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no policy + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithoutPolicy() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPolicy, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "primary"); + bundle.setPrimaryBitstreamID(bitstream); + authorizeService.removeAllPolicies(context, bitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPolicy, threshold); + assertThat("testWithoutPolicy 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED)); + } + + /** + * Test for an item with no primary bitstream + * @throws java.lang.Exception passed through. 
+ */ + @Test + public void testWithoutPrimaryBitstream() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryBitstream, Constants.CONTENT_BUNDLE_NAME); + Bitstream bitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bitstream.setName(context, "first"); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryBitstream, threshold); + assertThat("testWithoutPrimaryBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } + + /** + * Test for an item with an open access bitstream + * and another primary bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream primaryBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + bundle.setPrimaryBitstreamID(primaryBitstream); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, primaryBitstream); + authorizeService.addPolicies(context, policies, primaryBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold); + 
assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO)); + } + + /** + * Test for an item with an open access bitstream + * and another bitstream on embargo + * @throws java.lang.Exception passed through. + */ + @Test + public void testWithNoPrimaryAndMultipleBitstreams() throws Exception { + context.turnOffAuthorisationSystem(); + Bundle bundle = bundleService.create(context, itemWithoutPrimaryAndMultipleBitstreams, + Constants.CONTENT_BUNDLE_NAME); + bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + Bitstream anotherBitstream = bitstreamService.create(context, bundle, + new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8))); + List policies = new ArrayList<>(); + ResourcePolicy policy = resourcePolicyService.create(context); + policy.setRpName("Embargo"); + Group group = groupService.findByName(context, Group.ANONYMOUS); + policy.setGroup(group); + policy.setAction(Constants.READ); + policy.setStartDate(new LocalDate(9999, 12, 31).toDate()); + policies.add(policy); + authorizeService.removeAllPolicies(context, anotherBitstream); + authorizeService.addPolicies(context, policies, anotherBitstream); + context.restoreAuthSystemState(); + String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold); + assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS)); + } +} diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java index b218ba82fe..239d2864bf 100644 --- a/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/MockSHERPAService.java @@ -11,6 +11,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; +import 
java.util.Objects; import org.dspace.app.sherpa.v2.SHERPAPublisherResponse; import org.dspace.app.sherpa.v2.SHERPAResponse; @@ -25,20 +26,6 @@ import org.dspace.app.sherpa.v2.SHERPAResponse; */ public class MockSHERPAService extends SHERPAService { - /** - * Simple overridden 'searchByJournalISSN' so that we do attempt to build the URI but rather than make - * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our - * test resources. - * If URI creation, parsing, or IO fails along the way, a SHERPAResponse with an error message set will be - * returned. - * @param query ISSN string to pass in an "issn equals" API query - * @return SHERPAResponse - */ - @Override - public SHERPAResponse searchByJournalISSN(String query) { - return performRequest("publication", "issn", "equals", query, 0, 1); - } - /** * Simple overridden performRequest so that we do attempt to build the URI but rather than make * an actual HTTP call, return parsed SHERPAResponse for The Lancet based on known-good JSON stored with our @@ -67,8 +54,12 @@ public class MockSHERPAService extends SHERPAService { return new SHERPAResponse("Error building URI"); } - // Get mock JSON - in this case, a known good result for The Lancet - content = getClass().getResourceAsStream("thelancet.json"); + // Get mock JSON + // if a file with the name contained in the value does not exist, returns thelancet.json + content = getContent(value.concat(".json")); + if (Objects.isNull(content)) { + content = getContent("thelancet.json"); + } // Parse JSON input stream and return response for later evaluation return new SHERPAResponse(content, SHERPAResponse.SHERPAFormat.JSON); @@ -88,6 +79,10 @@ public class MockSHERPAService extends SHERPAService { } } + private InputStream getContent(String fileName) { + return getClass().getResourceAsStream(fileName); + } + /** * Simple overridden performPublisherRequest so that we do attempt to build the URI but rather than make * an 
actual HTTP call, return parsed SHERPAPublisherResponse for PLOS based on known-good JSON stored with our @@ -133,4 +128,5 @@ public class MockSHERPAService extends SHERPAService { return new SHERPAPublisherResponse(e.getMessage()); } } + } diff --git a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java index 1eaa916f56..438d754aa5 100644 --- a/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java +++ b/dspace-api/src/test/java/org/dspace/app/sherpa/submit/SHERPASubmitServiceTest.java @@ -11,7 +11,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.sql.SQLException; -import java.util.List; import org.dspace.AbstractUnitTest; import org.dspace.app.sherpa.v2.SHERPAResponse; @@ -109,20 +108,18 @@ public class SHERPASubmitServiceTest extends AbstractUnitTest { // Get responses from SHERPA submit service, which should inspect item ISSNs and perform search // on the mock SHERPA service - List responses = sherpaSubmitService.searchRelatedJournals(context, testItem); + SHERPAResponse response = sherpaSubmitService.searchRelatedJournals(context, testItem); // Make sure response is not null or empty - assertTrue("Response list should not be null or empty", - responses != null && !responses.isEmpty()); + assertTrue("Response should not be null", response != null); // For each response (there should be only one based on test data) perform the standard set // of thorough parsing tests - for (SHERPAResponse response : responses) { - // Assert response is not error, or fail with message - assertFalse("Response was flagged as 'isError'", response.isError()); - // Skip remainder of parsing tests - these are already done in SHERPAServiceTEst - } + // Assert response is not error, or fail with message + assertFalse("Response was flagged as 'isError'", response.isError()); + + // Skip 
remainder of parsing tests - these are already done in SHERPAServiceTEst } } diff --git a/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java new file mode 100644 index 0000000000..be4d6a12da --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/app/util/SubmissionConfigTest.java @@ -0,0 +1,88 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.util.ArrayList; +import java.util.List; + +import org.dspace.AbstractUnitTest; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests for parsing and utilities on submission config forms / readers + * + * @author Kim Shepherd + */ +public class SubmissionConfigTest extends AbstractUnitTest { + + DCInputsReader inputReader; + + @BeforeClass + public static void setUpClass() { + } + + @AfterClass + public static void tearDownClass() { + } + + @Before + public void setUp() throws DCInputsReaderException { + inputReader = new DCInputsReader(); + } + + @After + public void tearDown() { + inputReader = null; + } + + @Test + public void testReadAndProcessTypeBindSubmissionConfig() + throws SubmissionConfigReaderException, DCInputsReaderException { + // Set up test data. 
This should match the typebind test submission / form config + String typeBindHandle = "123456789/typebind-test"; + String typeBindSubmissionName = "typebindtest"; + String typeBindSubmissionStepName = "typebindtest"; + + // Expected field lists from typebindtest form + List allConfiguredFields = new ArrayList<>(); + allConfiguredFields.add("dc.title"); + allConfiguredFields.add("dc.date.issued"); + allConfiguredFields.add("dc.type"); + allConfiguredFields.add("dc.identifier.isbn"); + List unboundFields = allConfiguredFields.subList(0, 3); + + // Get submission configuration + SubmissionConfig submissionConfig = + new SubmissionConfigReader().getSubmissionConfigByCollection(typeBindHandle); + // Submission name should match name defined in item-submission.xml + assertEquals(typeBindSubmissionName, submissionConfig.getSubmissionName()); + // Step 0 - our process only has one step. It should not be null and have the ID typebindtest + SubmissionStepConfig submissionStepConfig = submissionConfig.getStep(0); + assertNotNull(submissionStepConfig); + assertEquals(typeBindSubmissionStepName, submissionStepConfig.getId()); + // Get inputs and allowed fields + DCInputSet inputConfig = inputReader.getInputsByFormName(submissionStepConfig.getId()); + List allowedFieldsForBook = inputConfig.populateAllowedFieldNames("Book"); + List allowedFieldsForBookChapter = inputConfig.populateAllowedFieldNames("Book chapter"); + List allowedFieldsForArticle = inputConfig.populateAllowedFieldNames("Article"); + List allowedFieldsForNoType = inputConfig.populateAllowedFieldNames(null); + // Book and book chapter should be allowed all 5 fields (each is bound to dc.identifier.isbn) + assertEquals(allConfiguredFields, allowedFieldsForBook); + assertEquals(allConfiguredFields, allowedFieldsForBookChapter); + // Article and type should match a subset of the fields without ISBN + assertEquals(unboundFields, allowedFieldsForArticle); + assertEquals(unboundFields, allowedFieldsForNoType); + } +} 
diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 57d9ca41ce..d48f1ecc4f 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -8,6 +8,8 @@ package org.dspace.builder; import static org.dspace.content.LicenseUtils.getLicenseText; +import static org.dspace.content.MetadataSchemaEnum.DC; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; import java.io.IOException; import java.sql.SQLException; @@ -76,6 +78,11 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { public ItemBuilder withAuthor(final String authorName) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", authorName); } + + public ItemBuilder withAuthor(final String authorName, final String authority) { + return addMetadataValue(item, DC.getName(), "contributor", "author", null, authorName, authority, 600); + } + public ItemBuilder withAuthor(final String authorName, final String authority, final int confidence) { return addMetadataValue(item, MetadataSchemaEnum.DC.getName(), "contributor", "author", null, authorName, authority, confidence); @@ -147,6 +154,10 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return addMetadataValue(item, schema, element, qualifier, value); } + public ItemBuilder withDspaceObjectOwner(String value, String authority) { + return addMetadataValue(item, "dspace", "object", "owner", null, value, authority, CF_ACCEPTED); + } + public ItemBuilder makeUnDiscoverable() { item.setDiscoverable(false); return this; @@ -175,7 +186,7 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { /** * Create an admin group for the collection with the specified members * - * @param members epersons to add to the admin group + * @param ePerson epersons to add to the admin group * @return this builder * @throws SQLException * 
@throws AuthorizeException @@ -184,6 +195,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { return setAdminPermission(item, ePerson, null); } + public ItemBuilder withPersonEmail(String email) { + return addMetadataValue(item, "person", "email", null, email); + } @Override public Item build() { diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index cf6fb4ba98..8b7bc2978b 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -177,14 +177,18 @@ public class WorkspaceItemBuilder extends AbstractBuilder specialGroups = instance.getSpecialGroups(); - assertThat("testGetSpecialGroup 0", specialGroups.size(), equalTo(2)); - assertThat("testGetSpecialGroup 1", specialGroups.get(0), equalTo(group)); - assertThat("testGetSpecialGroup 1", specialGroups.get(1), equalTo(adminGroup)); + assertThat("testGetSpecialGroup size", specialGroups.size(), equalTo(2)); + assertThat("testGetSpecialGroup content", specialGroups, hasItems(group, adminGroup)); // Cleanup our context & group groupService.delete(instance, group); diff --git a/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java new file mode 100644 index 0000000000..17e21779d4 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/DoiCheckTest.java @@ -0,0 +1,65 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.text.ParseException; +import java.util.Arrays; +import java.util.List; + +import 
org.apache.commons.lang.StringUtils; +import org.dspace.importer.external.service.DoiCheck; +import org.junit.Test; + +/** + * Test class for the DoiCheck + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +public class DoiCheckTest { + + @Test + public void checkDOIsTest() throws ParseException { + for (String doi : DOIsToTest()) { + assertTrue("The: " + doi + " is a doi!", DoiCheck.isDoi(doi)); + } + } + + @Test + public void checkWrongDOIsTest() throws ParseException { + for (String key : wrongDOIsToTest()) { + assertFalse("This : " + key + " isn't a doi!", DoiCheck.isDoi(key)); + } + } + + private List DOIsToTest() { + return Arrays.asList( + "10.1430/8105", + "10.1038/nphys1170", + "10.1002/0470841559.ch1", + "10.1594/PANGAEA.726855", + "10.1594/GFZ.GEOFON.gfz2009kciu", + "10.3866/PKU.WHXB201112303", + "10.11467/isss2003.7.1_11", + "10.3972/water973.0145.db" + ); + } + + private List wrongDOIsToTest() { + return Arrays.asList( + StringUtils.EMPTY, + "123456789", + "nphys1170/10.1038", + "10.", "10", + "10.1038/" + ); + } + +} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json new file mode 100644 index 0000000000..3b9e474502 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/0000-0000.json @@ -0,0 +1,3 @@ +{ + "items": [] +} \ No newline at end of file diff --git a/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json new file mode 100644 index 0000000000..2e5c7e2db9 --- /dev/null +++ b/dspace-api/src/test/resources/org/dspace/app/sherpa/2731-0582.json @@ -0,0 +1,504 @@ +{ + "items": [ + { + "system_metadata": { + "id": 40863, + "uri": "https://v2.sherpa.ac.uk/id/publication/40863", + "date_modified": "2022-03-25 14:08:29", + "publicly_visible": "yes", + "publicly_visible_phrases": [ + { + "language": "en", + 
"phrase": "Yes", + "value": "yes" + } + ], + "date_created": "2022-01-11 09:43:53" + }, + "tj_status_phrases": [ + { + "phrase": "Plan S Approved", + "value": "plan_s_approved", + "language": "en" + } + ], + "type_phrases": [ + { + "value": "journal", + "phrase": "Journal", + "language": "en" + } + ], + "id": 40863, + "issns": [ + { + "issn": "2731-0582" + } + ], + "publishers": [ + { + "relationship_type": "commercial_publisher", + "relationship_type_phrases": [ + { + "value": "commercial_publisher", + "phrase": "Commercial Publisher", + "language": "en" + } + ], + "publisher": { + "id": 3286, + "name": [ + { + "name": "Nature Research", + "language": "en", + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ], + "preferred": "name", + "language_phrases": [ + { + "phrase": "English", + "value": "en", + "language": "en" + } + ] + } + ], + "imprint_of_id": 62037, + "country": "gb", + "country_phrases": [ + { + "value": "gb", + "phrase": "United Kingdom", + "language": "en" + } + ], + "publication_count": 87, + "uri": "https://v2.sherpa.ac.uk/id/publisher/3286", + "url": "https://www.nature.com/" + } + } + ], + "listed_in_doaj_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "listed_in_doaj": "no", + "tj_status": [ + "plan_s_approved" + ], + "publisher_policy": [ + { + "open_access_prohibited": "no", + "id": 3286, + "publication_count": 36, + "internal_moniker": "Default Policy", + "urls": [ + { + "description": "Self archiving and license to publish", + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish" + }, + { + "description": "Preprints and Conference Proceedings", + "url": "https://www.nature.com/nature-portfolio/editorial-policies/preprints-and-conference-proceedings" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/accepted-manuscript-terms", + "description": "Accepted manuscript terms of use" + } + ], + 
"open_access_prohibited_phrases": [ + { + "value": "no", + "phrase": "No", + "language": "en" + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/3286", + "permitted_oa": [ + { + "prerequisites": { + "prerequisites_phrases": [ + { + "language": "en", + "value": "when_research_article", + "phrase": "If a Research Article" + } + ], + "prerequisites": [ + "when_research_article" + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ], + "article_version_phrases": [ + { + "language": "en", + "value": "submitted", + "phrase": "Submitted" + } + ], + "additional_oa_fee": "no", + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "article_version": [ + "submitted" + ], + "location": { + "location_phrases": [ + { + "value": "authors_homepage", + "phrase": "Author's Homepage", + "language": "en" + }, + { + "language": "en", + "phrase": "Funder Designated Location", + "value": "funder_designated_location" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "phrase": "Preprint Repository", + "value": "preprint_repository", + "language": "en" + } + ], + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "preprint_repository" + ] + }, + "conditions": [ + "Must link to publisher version", + "Upon publication, source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ] + }, + { + "embargo": { + "units": "months", + "amount": 6, + "units_phrases": [ + { + "phrase": "Months", + "value": "months", + "language": "en" + } + ] + }, + "license": [ + { + "license_phrases": [ + { + "phrase": "Publisher's Bespoke License", + "value": "bespoke_license", + "language": "en" + } + ], + "license": "bespoke_license" + } + ], + "article_version_phrases": [ + { 
+ "value": "accepted", + "phrase": "Accepted", + "language": "en" + } + ], + "additional_oa_fee": "no", + "conditions": [ + "Must link to publisher version", + "Published source must be acknowledged and DOI cited", + "Post-prints are subject to Springer Nature re-use terms", + "Non-commercial use only" + ], + "copyright_owner_phrases": [ + { + "phrase": "Authors", + "value": "authors", + "language": "en" + } + ], + "location": { + "location": [ + "authors_homepage", + "funder_designated_location", + "institutional_repository", + "named_repository" + ], + "location_phrases": [ + { + "phrase": "Author's Homepage", + "value": "authors_homepage", + "language": "en" + }, + { + "phrase": "Funder Designated Location", + "value": "funder_designated_location", + "language": "en" + }, + { + "language": "en", + "value": "institutional_repository", + "phrase": "Institutional Repository" + }, + { + "language": "en", + "value": "named_repository", + "phrase": "Named Repository" + } + ], + "named_repository": [ + "PubMed Central", + "Europe PMC" + ] + }, + "article_version": [ + "accepted" + ], + "prerequisites": { + "prerequisites": [ + "when_research_article" + ], + "prerequisites_phrases": [ + { + "value": "when_research_article", + "phrase": "If a Research Article", + "language": "en" + } + ] + }, + "copyright_owner": "authors", + "additional_oa_fee_phrases": [ + { + "language": "en", + "value": "no", + "phrase": "No" + } + ] + } + ] + }, + { + "id": 4410, + "open_access_prohibited": "no", + "urls": [ + { + "url": "https://www.springernature.com/gp/open-research/about/the-fundamentals-of-open-access-and-open-research", + "description": "The fundamentals of open access and open research" + }, + { + "url": "https://www.nature.com/neuro/editorial-policies/self-archiving-and-license-to-publish", + "description": "Self archiving and license to publish" + }, + { + "url": "https://www.springernature.com/gp/open-research/policies/journal-policies", + "description": "Open access 
policies for journals" + } + ], + "open_access_prohibited_phrases": [ + { + "language": "en", + "phrase": "No", + "value": "no" + } + ], + "internal_moniker": "Open Access", + "publication_count": 34, + "permitted_oa": [ + { + "additional_oa_fee_phrases": [ + { + "language": "en", + "phrase": "Yes", + "value": "yes" + } + ], + "copyright_owner": "authors", + "conditions": [ + "Published source must be acknowledged with citation" + ], + "article_version": [ + "published" + ], + "copyright_owner_phrases": [ + { + "language": "en", + "value": "authors", + "phrase": "Authors" + } + ], + "location": { + "location_phrases": [ + { + "phrase": "Any Website", + "value": "any_website", + "language": "en" + }, + { + "language": "en", + "phrase": "Journal Website", + "value": "this_journal" + } + ], + "location": [ + "any_website", + "this_journal" + ] + }, + "additional_oa_fee": "yes", + "article_version_phrases": [ + { + "phrase": "Published", + "value": "published", + "language": "en" + } + ], + "license": [ + { + "license_phrases": [ + { + "phrase": "CC BY", + "value": "cc_by", + "language": "en" + } + ], + "license": "cc_by", + "version": "4.0" + } + ], + "publisher_deposit": [ + { + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "value": "disciplinary", + "phrase": "Disciplinary" + } + ], + "notes": "Launched as UK PubMed Central (UKPMC) in January 2007, changed to Europe PubMed Central in November 2012.\r\nSpecial item types include: Links", + "url": "http://europepmc.org/", + "type": "disciplinary", + "name": [ + { + "name": "Europe PMC", + "language": "en", + "preferred": "name", + "language_phrases": [ + { + "value": "en", + "phrase": "English", + "language": "en" + } + ], + "preferred_phrases": [ + { + "language": "en", + "phrase": "Name", + "value": "name" + } + ] + } + ] + }, + "system_metadata": { + "id": 908, + "uri": "https://v2.sherpa.ac.uk/id/repository/908" + } + }, + { + "system_metadata": { + "id": 267, + "uri": 
"https://v2.sherpa.ac.uk/id/repository/267" + }, + "repository_metadata": { + "type_phrases": [ + { + "language": "en", + "phrase": "Disciplinary", + "value": "disciplinary" + } + ], + "type": "disciplinary", + "url": "http://www.ncbi.nlm.nih.gov/pmc/", + "name": [ + { + "language": "en", + "name": "PubMed Central", + "preferred": "name", + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred_phrases": [ + { + "language": "en", + "value": "name", + "phrase": "Name" + } + ] + } + ] + } + } + ] + } + ], + "uri": "https://v2.sherpa.ac.uk/id/publisher_policy/4410" + } + ], + "title": [ + { + "preferred_phrases": [ + { + "language": "en", + "phrase": "Title", + "value": "name" + } + ], + "language_phrases": [ + { + "language": "en", + "value": "en", + "phrase": "English" + } + ], + "preferred": "name", + "title": "Nature Synthesis", + "language": "en" + } + ], + "type": "journal", + "url": "https://www.nature.com/natsynth/" + } + ] +} \ No newline at end of file diff --git a/dspace-iiif/pom.xml b/dspace-iiif/pom.xml index e606523182..d766ff2ec6 100644 --- a/dspace-iiif/pom.xml +++ b/dspace-iiif/pom.xml @@ -57,6 +57,13 @@ org.springframework.boot spring-boot-starter-data-rest ${spring-boot.version} + + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + org.springframework.boot @@ -73,7 +80,6 @@ javax.cache cache-api - 1.1.0 diff --git a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java index 212f1e3406..379f2fa181 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/controller/DSpaceOAIDataProvider.java @@ -72,7 +72,12 @@ public class DSpaceOAIDataProvider { private DSpaceResumptionTokenFormatter resumptionTokenFormat = new DSpaceResumptionTokenFormatter(); - @RequestMapping({"", "/"}) + @RequestMapping("") + 
public void index(HttpServletResponse response, HttpServletRequest request) throws IOException { + response.sendRedirect(request.getRequestURI() + "/"); + } + + @RequestMapping({"/"}) public String indexAction(HttpServletResponse response, Model model) throws ServletException { try { XOAIManager manager = xoaiManagerResolver.getManager(); diff --git a/dspace-rest/pom.xml b/dspace-rest/pom.xml index 27e4a168dc..1b6416f3fe 100644 --- a/dspace-rest/pom.xml +++ b/dspace-rest/pom.xml @@ -19,7 +19,6 @@ ${basedir}/.. - 5.3.10.RELEASE diff --git a/dspace-server-webapp/pom.xml b/dspace-server-webapp/pom.xml index eff8deae46..4be4794923 100644 --- a/dspace-server-webapp/pom.xml +++ b/dspace-server-webapp/pom.xml @@ -264,6 +264,13 @@ org.springframework.boot spring-boot-starter-data-rest ${spring-boot.version} + + + + com.fasterxml.jackson.datatype + jackson-datatype-jdk8 + + @@ -271,6 +278,12 @@ spring-boot-starter-aop ${spring-boot.version} + + + org.springframework.boot + spring-boot-starter-actuator + ${spring-boot.version} + com.flipkart.zjsonpatch @@ -457,7 +470,6 @@ javax.cache cache-api - 1.1.0 diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java index 459cfe0dee..ec01d8574e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/Application.java @@ -161,6 +161,7 @@ public class Application extends SpringBootServletInitializer { @Override public void addCorsMappings(@NonNull CorsRegistry registry) { // Get allowed origins for api and iiif endpoints. 
+ // The actuator endpoints are configured using management.endpoints.web.cors.* properties String[] corsAllowedOrigins = configuration .getCorsAllowedOrigins(configuration.getCorsAllowedOriginsConfig()); String[] iiifAllowedOrigins = configuration diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java index 7d9cb470f9..313fe2de60 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/AuthenticationRestController.java @@ -7,8 +7,13 @@ */ package org.dspace.app.rest; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + import java.sql.SQLException; import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; @@ -19,9 +24,11 @@ import org.dspace.app.rest.model.AuthenticationStatusRest; import org.dspace.app.rest.model.AuthenticationTokenRest; import org.dspace.app.rest.model.AuthnRest; import org.dspace.app.rest.model.EPersonRest; +import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.model.hateoas.AuthenticationStatusResource; import org.dspace.app.rest.model.hateoas.AuthenticationTokenResource; import org.dspace.app.rest.model.hateoas.AuthnResource; +import org.dspace.app.rest.model.hateoas.EmbeddedPage; import org.dspace.app.rest.model.wrapper.AuthenticationToken; import org.dspace.app.rest.projection.Projection; import org.dspace.app.rest.security.RestAuthenticationService; @@ -34,6 +41,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.EntityModel; import org.springframework.hateoas.Link; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; @@ -109,6 +120,8 @@ public class AuthenticationRestController implements InitializingBean { if (context.getCurrentUser() != null) { ePersonRest = converter.toRest(context.getCurrentUser(), projection); } + List groupList = context.getSpecialGroups().stream() + .map(g -> (GroupRest) converter.toRest(g, projection)).collect(Collectors.toList()); AuthenticationStatusRest authenticationStatusRest = new AuthenticationStatusRest(ePersonRest); // When not authenticated add WWW-Authenticate so client can retrieve all available authentication methods @@ -120,11 +133,41 @@ public class AuthenticationRestController implements InitializingBean { } authenticationStatusRest.setAuthenticationMethod(context.getAuthenticationMethod()); authenticationStatusRest.setProjection(projection); + authenticationStatusRest.setSpecialGroups(groupList); + AuthenticationStatusResource authenticationStatusResource = converter.toResource(authenticationStatusRest); return authenticationStatusResource; } + /** + * Check the current user's authentication status (i.e. whether they are authenticated or not) and, + * if authenticated, retrieves the current context's special groups. 
+ * @param page + * @param assembler + * @param request + * @param response + * @return + * @throws SQLException + */ + @RequestMapping(value = "/status/specialGroups", method = RequestMethod.GET) + public EntityModel retrieveSpecialGroups(Pageable page, PagedResourcesAssembler assembler, + HttpServletRequest request, HttpServletResponse response) + throws SQLException { + Context context = ContextUtil.obtainContext(request); + Projection projection = utils.obtainProjection(); + + List groupList = context.getSpecialGroups().stream() + .map(g -> (GroupRest) converter.toRest(g, projection)).collect(Collectors.toList()); + Page groupPage = (Page) utils.getPage(groupList, page); + Link link = linkTo( + methodOn(AuthenticationRestController.class).retrieveSpecialGroups(page, assembler, request, response)) + .withSelfRel(); + + return EntityModel.of(new EmbeddedPage(link.getHref(), + groupPage.map(converter::toResource), null, "specialGroups")); + } + /** * Check whether the login has succeeded or not. The actual login is performed by one of the enabled login filters * (e.g. {@link org.dspace.app.rest.security.StatelessLoginFilter}). diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java index 183aee83d0..01822e5536 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamRestController.java @@ -153,8 +153,9 @@ public class BitstreamRestController { } org.dspace.app.rest.utils.BitstreamResource bitstreamResource = - new org.dspace.app.rest.utils.BitstreamResource( - name, uuid, currentUser != null ? currentUser.getID() : null, citationEnabledForBitstream); + new org.dspace.app.rest.utils.BitstreamResource(name, uuid, + currentUser != null ? 
currentUser.getID() : null, + context.getSpecialGroupUuids(), citationEnabledForBitstream); //We have all the data we need, close the connection to the database so that it doesn't stay open during //download/streaming diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 500176a348..6a760b7d08 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -23,6 +23,7 @@ import javax.xml.transform.stream.StreamResult; import org.apache.logging.log4j.Logger; import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.app.rest.utils.ScopeResolver; import org.dspace.app.util.SyndicationFeed; import org.dspace.app.util.factory.UtilServiceFactory; import org.dspace.app.util.service.OpenSearchService; @@ -35,12 +36,17 @@ import org.dspace.core.Context; import org.dspace.core.LogHelper; import org.dspace.core.Utils; import org.dspace.discovery.DiscoverQuery; +import org.dspace.discovery.DiscoverQuery.SORT_ORDER; import org.dspace.discovery.DiscoverResult; import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.SearchService; import org.dspace.discovery.SearchServiceException; import org.dspace.discovery.SearchUtils; import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.discovery.configuration.DiscoverySearchFilter; +import org.dspace.discovery.indexobject.IndexableItem; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; @@ -67,8 +73,17 @@ public class OpenSearchController { private AuthorizeService authorizeService; private OpenSearchService 
openSearchService; + @Autowired + private SearchService searchService; + + @Autowired + private DiscoveryConfigurationService searchConfigurationService; + private Context context; + @Autowired + private ScopeResolver scopeResolver; + /** * This method provides the OpenSearch query on the path /search * It will pass the result as a OpenSearchDocument directly to the client @@ -80,6 +95,9 @@ public class OpenSearchController { @RequestParam(name = "start", required = false) Integer start, @RequestParam(name = "rpp", required = false) Integer count, @RequestParam(name = "format", required = false) String format, + @RequestParam(name = "sort", required = false) String sort, + @RequestParam(name = "sort_direction", required = false) String sortDirection, + @RequestParam(name = "scope", required = false) String dsoObject, Model model) throws IOException, ServletException { context = ContextUtil.obtainContext(request); if (start == null) { @@ -115,9 +133,34 @@ public class OpenSearchController { // support pagination parameters DiscoverQuery queryArgs = new DiscoverQuery(); - queryArgs.setQuery(query); + if (query == null) { + query = ""; + } else { + queryArgs.setQuery(query); + } queryArgs.setStart(start); queryArgs.setMaxResults(count); + queryArgs.setDSpaceObjectFilter(IndexableItem.TYPE); + if (sort != null) { + //this is the default sort so we want to switch this to date accessioned + if (sortDirection != null && sortDirection.equals("DESC")) { + queryArgs.setSortField(sort + "_sort", SORT_ORDER.desc); + } else { + queryArgs.setSortField(sort + "_sort", SORT_ORDER.asc); + } + } else { + queryArgs.setSortField("dc.date.accessioned_dt", SORT_ORDER.desc); + } + if (dsoObject != null) { + container = scopeResolver.resolveScope(context, dsoObject); + DiscoveryConfiguration discoveryConfiguration = searchConfigurationService + .getDiscoveryConfigurationByNameOrDso("site", container); + queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); + 
queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() + .toArray( + new String[discoveryConfiguration.getDefaultFilterQueries() + .size()])); + } // Perform the search DiscoverResult qResults = null; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java new file mode 100644 index 0000000000..e772aa0abe --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyEPersonReplaceRestController.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; +import static org.dspace.core.Constants.EPERSON; +import static org.springframework.web.bind.annotation.RequestMethod.PUT; + +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.hateoas.RepresentationModel; +import 
org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * This controller will handle all the incoming calls on the/api/authz/resourcepolicies/{id}/eperson endpoint + * where the id corresponds to the ResourcePolicy of which you want to replace the related EPerson. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@RestController +@RequestMapping("/api/authz/resourcepolicies" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + "/eperson") +public class ResourcePolicyEPersonReplaceRestController { + + @Autowired + private Utils utils; + @Autowired + private ResourcePolicyService resourcePolicyService; + + @PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')") + @RequestMapping(method = PUT, consumes = {"text/uri-list"}) + public ResponseEntity> replaceEPersonOfResourcePolicy(@PathVariable Integer id, + HttpServletResponse response, HttpServletRequest request) throws SQLException, AuthorizeException { + + Context context = obtainContext(request); + List dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request)); + + if (dsoList.size() != 1 || dsoList.get(0).getType() != EPERSON) { + throw new UnprocessableEntityException( + "The EPerson doesn't exist or the data cannot be resolved to an EPerson."); + } + + ResourcePolicy resourcePolicy = resourcePolicyService.find(context, id); + if (Objects.isNull(resourcePolicy)) { + throw new ResourceNotFoundException("ResourcePolicy with id: " + id + " not found"); + } + + if (Objects.isNull(resourcePolicy.getEPerson())) { + throw new UnprocessableEntityException("ResourcePolicy with id:" + id + " doesn't link to an EPerson"); + } + EPerson newEPerson = (EPerson) dsoList.get(0); + 
resourcePolicy.setEPerson(newEPerson); + context.commit(); + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java new file mode 100644 index 0000000000..e9ba0dff44 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ResourcePolicyGroupReplaceRestController.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; +import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; +import static org.dspace.core.Constants.GROUP; +import static org.springframework.web.bind.annotation.RequestMethod.PUT; + +import java.sql.SQLException; +import java.util.List; +import java.util.Objects; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.utils.Utils; +import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.ResourcePolicyService; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; +import org.dspace.eperson.Group; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.HttpStatus; +import 
org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * This controller will handle all the incoming calls on the/api/authz/resourcepolicies/{id}/group endpoint + * where the id corresponds to the ResourcePolicy of which you want to replace the related Group. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@RestController +@RequestMapping("/api/authz/resourcepolicies" + REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT + "/group") +public class ResourcePolicyGroupReplaceRestController { + + @Autowired + private Utils utils; + @Autowired + private ResourcePolicyService resourcePolicyService; + + @PreAuthorize("hasPermission(#id, 'resourcepolicy', 'ADMIN')") + @RequestMapping(method = PUT, consumes = {"text/uri-list"}) + public ResponseEntity> replaceGroupOfResourcePolicy(@PathVariable Integer id, + HttpServletResponse response, HttpServletRequest request) throws SQLException, AuthorizeException { + + Context context = obtainContext(request); + List dsoList = utils.constructDSpaceObjectList(context, utils.getStringListFromRequest(request)); + + if (dsoList.size() != 1 || dsoList.get(0).getType() != GROUP) { + throw new UnprocessableEntityException("The Group doesn't exist or the data cannot be resolved to a Group"); + } + + ResourcePolicy resourcePolicy = resourcePolicyService.find(context, id); + if (Objects.isNull(resourcePolicy)) { + throw new ResourceNotFoundException("ResourcePolicy with id: " + id + " not found!"); + } + + if (Objects.isNull(resourcePolicy.getGroup())) { + throw new UnprocessableEntityException("ResourcePolicy with id:" + id + " doesn't link to a Group"); + } + + Group newGroup = (Group) dsoList.get(0); + resourcePolicy.setGroup(newGroup); + context.commit(); + return 
ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CanClaimItemFeature.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CanClaimItemFeature.java new file mode 100644 index 0000000000..91974659a9 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/authorization/impl/CanClaimItemFeature.java @@ -0,0 +1,96 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization.impl; + +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.profile.service.ResearcherProfileService; +import org.dspace.app.rest.authorization.AuthorizationFeature; +import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation; +import org.dspace.app.rest.model.BaseObjectRest; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.MetadataValue; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Checks if the given user can claim the given item. An item can be claimed + * only if the show claim is enabled for it (see + * {@link org.dspace.app.rest.authorization.impl.ShowClaimItemFeature}). 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Component +@AuthorizationFeatureDocumentation(name = CanClaimItemFeature.NAME, + description = "Used to verify if the current user is able to claim this item as their profile. " + + "Only available if the current item is not already claimed.") +public class CanClaimItemFeature implements AuthorizationFeature { + + public static final String NAME = "canClaimItem"; + + private static final Logger LOG = LoggerFactory.getLogger(CanClaimItemFeature.class); + + @Autowired + private ItemService itemService; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Override + @SuppressWarnings("rawtypes") + public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException { + + if (!(object instanceof ItemRest) || context.getCurrentUser() == null) { + return false; + } + + String id = ((ItemRest) object).getId(); + Item item = itemService.find(context, UUID.fromString(id)); + + return researcherProfileService.hasProfileType(item) + && hasNotOwner(item) + && hasNotAlreadyAProfile(context) + && haveSameEmail(item, context.getCurrentUser()); + } + + private boolean hasNotAlreadyAProfile(Context context) { + try { + return researcherProfileService.findById(context, context.getCurrentUser().getID()) == null; + } catch (SQLException | AuthorizeException e) { + LOG.warn("Error while checking if eperson has a ResearcherProfileAssociated: {}", e.getMessage(), e); + return false; + } + } + + private boolean hasNotOwner(Item item) { + return StringUtils.isBlank(itemService.getMetadata(item, "dspace.object.owner")); + } + + private boolean haveSameEmail(Item item, EPerson currentUser) { + return itemService.getMetadataByMetadataString(item, "person.email").stream() + .map(MetadataValue::getValue) + .filter(StringUtils::isNotBlank) + .anyMatch(email -> email.equalsIgnoreCase(currentUser.getEmail())); + } + + @Override + public String[] getSupportedTypes() { + 
return new String[] { ItemRest.CATEGORY + "." + ItemRest.NAME }; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/configuration/ActuatorConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/configuration/ActuatorConfiguration.java new file mode 100644 index 0000000000..ad78fe2db4 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/configuration/ActuatorConfiguration.java @@ -0,0 +1,95 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.configuration; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.util.Arrays; + +import org.apache.solr.client.solrj.SolrServerException; +import org.dspace.app.rest.DiscoverableEndpointsService; +import org.dspace.app.rest.health.GeoIpHealthIndicator; +import org.dspace.authority.AuthoritySolrServiceImpl; +import org.dspace.discovery.SolrSearchCore; +import org.dspace.statistics.SolrStatisticsCore; +import org.dspace.xoai.services.api.solr.SolrServerResolver; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.actuate.autoconfigure.health.ConditionalOnEnabledHealthIndicator; +import org.springframework.boot.actuate.health.Status; +import org.springframework.boot.actuate.solr.SolrHealthIndicator; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.event.EventListener; +import org.springframework.hateoas.Link; + +/** + * Configuration class related to the actuator endpoints. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Configuration +public class ActuatorConfiguration { + + public static final Status UP_WITH_ISSUES_STATUS = new Status("UP_WITH_ISSUES"); + + @Autowired + private DiscoverableEndpointsService discoverableEndpointsService; + + @Value("${management.endpoints.web.base-path:/actuator}") + private String actuatorBasePath; + + @EventListener(ApplicationReadyEvent.class) + public void registerActuatorEndpoints() { + discoverableEndpointsService.register(this, Arrays.asList(Link.of(actuatorBasePath, "actuator"))); + } + + @Bean + @ConditionalOnEnabledHealthIndicator("solrSearch") + @ConditionalOnProperty("discovery.search.server") + public SolrHealthIndicator solrSearchCoreHealthIndicator(SolrSearchCore solrSearchCore) { + return new SolrHealthIndicator(solrSearchCore.getSolr()); + } + + @Bean + @ConditionalOnEnabledHealthIndicator("solrStatistics") + @ConditionalOnProperty("solr-statistics.server") + public SolrHealthIndicator solrStatisticsCoreHealthIndicator(SolrStatisticsCore solrStatisticsCore) { + return new SolrHealthIndicator(solrStatisticsCore.getSolr()); + } + + @Bean + @ConditionalOnEnabledHealthIndicator("solrAuthority") + @ConditionalOnProperty("solr.authority.server") + public SolrHealthIndicator solrAuthorityCoreHealthIndicator(AuthoritySolrServiceImpl authoritySolrService) + throws MalformedURLException, SolrServerException, IOException { + return new SolrHealthIndicator(authoritySolrService.getSolr()); + } + + @Bean + @ConditionalOnEnabledHealthIndicator("solrOai") + @ConditionalOnProperty("oai.solr.url") + public SolrHealthIndicator solrOaiCoreHealthIndicator(SolrServerResolver solrServerResolver) + throws SolrServerException { + return new SolrHealthIndicator(solrServerResolver.getServer()); + } + + @Bean + @ConditionalOnEnabledHealthIndicator("geoIp") + public GeoIpHealthIndicator geoIpHealthIndicator() { + return new GeoIpHealthIndicator(); + } + + public String 
getActuatorBasePath() { + return actuatorBasePath; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ResearcherProfileConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ResearcherProfileConverter.java new file mode 100644 index 0000000000..7074065105 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/ResearcherProfileConverter.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; + +import org.dspace.app.profile.ResearcherProfile; +import org.dspace.app.rest.model.ResearcherProfileRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.content.Item; +import org.springframework.stereotype.Component; + +/** + * This converter is responsible for transforming an model that represent a + * ResearcherProfile to the REST representation of an ResearcherProfile. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Component +public class ResearcherProfileConverter implements DSpaceConverter { + + @Override + public ResearcherProfileRest convert(ResearcherProfile profile, Projection projection) { + ResearcherProfileRest researcherProfileRest = new ResearcherProfileRest(); + + researcherProfileRest.setVisible(profile.isVisible()); + researcherProfileRest.setId(profile.getId()); + researcherProfileRest.setProjection(projection); + + Item item = profile.getItem(); + + return researcherProfileRest; + } + + @Override + public Class getModelClass() { + return ResearcherProfile.class; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java index 4555d8b00a..4febcd5594 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionFormConverter.java @@ -155,6 +155,7 @@ public class SubmissionFormConverter implements DSpaceConverterExtend {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.

+ * + */ +public class GroupHasPendingWorkflowTasksException + extends UnprocessableEntityException implements TranslatableException { + public static final String MESSAGE_KEY = + "org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message"; + + public GroupHasPendingWorkflowTasksException() { + super(I18nUtil.getMessage(MESSAGE_KEY)); + } + + public GroupHasPendingWorkflowTasksException(Throwable cause) { + super(I18nUtil.getMessage(MESSAGE_KEY), cause); + } + + public String getMessageKey() { + return MESSAGE_KEY; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/health/GeoIpHealthIndicator.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/health/GeoIpHealthIndicator.java new file mode 100644 index 0000000000..191bb4f1ef --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/health/GeoIpHealthIndicator.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.health; + +import static org.dspace.app.rest.configuration.ActuatorConfiguration.UP_WITH_ISSUES_STATUS; + +import org.dspace.statistics.GeoIpService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.actuate.health.AbstractHealthIndicator; +import org.springframework.boot.actuate.health.Health.Builder; +import org.springframework.boot.actuate.health.HealthIndicator; + +/** + * Implementation of {@link HealthIndicator} that verifies if the GeoIP database + * is configured correctly. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class GeoIpHealthIndicator extends AbstractHealthIndicator { + + @Autowired + private GeoIpService geoIpService; + + @Override + protected void doHealthCheck(Builder builder) throws Exception { + + try { + geoIpService.getDatabaseReader(); + builder.up(); + } catch (IllegalStateException ex) { + builder.status(UP_WITH_ISSUES_STATUS).withDetail("reason", ex.getMessage()); + } + + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/HalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/HalLinkFactory.java index 659c0be517..86719847d8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/HalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/HalLinkFactory.java @@ -43,10 +43,12 @@ public abstract class HalLinkFactory { return list; } - + /** + * Please note that this method could lead to double encoding. 
+ * See: https://github.com/DSpace/DSpace/issues/8333 + */ protected Link buildLink(String rel, T data) { UriComponentsBuilder uriComponentsBuilder = uriBuilder(data); - return buildLink(rel, uriComponentsBuilder.build().toUriString()); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/externalsources/ExternalSourceEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/externalsources/ExternalSourceEntryHalLinkFactory.java index 0c0f9678b8..a5189fc844 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/externalsources/ExternalSourceEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/externalsources/ExternalSourceEntryHalLinkFactory.java @@ -7,13 +7,15 @@ */ package org.dspace.app.rest.link.externalsources; +import static org.springframework.hateoas.IanaLinkRelations.SELF; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; + import java.util.LinkedList; import org.dspace.app.rest.ExternalSourcesRestController; import org.dspace.app.rest.link.HalLinkFactory; import org.dspace.app.rest.model.hateoas.ExternalSourceEntryResource; import org.springframework.data.domain.Pageable; -import org.springframework.hateoas.IanaLinkRelations; import org.springframework.hateoas.Link; import org.springframework.stereotype.Component; @@ -26,12 +28,19 @@ public class ExternalSourceEntryHalLinkFactory @Override protected void addLinks(ExternalSourceEntryResource halResource, Pageable pageable, LinkedList list) - throws Exception { - - list.add(buildLink(IanaLinkRelations.SELF.value(), - getMethodOn().getExternalSourceEntryValue(halResource.getContent().getExternalSource(), - halResource.getContent().getId()))); + throws Exception { + String externalSource = halResource.getContent().getExternalSource(); + String id = halResource.getContent().getId(); + list.add(buildLink(getMethodOn().getExternalSourceEntryValue(externalSource, id), 
SELF.value())); + } + /** + * FIXME + * This custom logic should be removed when Some HAL links + * show a double encoding https://github.com/DSpace/DSpace/issues/8333 is fixed. + */ + private Link buildLink(T data, String rel) { + return linkTo(data).withRel(rel); } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/PostLoggedInAction.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/PostLoggedInAction.java new file mode 100644 index 0000000000..d288ef1ecf --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/PostLoggedInAction.java @@ -0,0 +1,27 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.login; + +import org.dspace.core.Context; + +/** + * Interface for classes that need to perform some operations after the user + * login. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public interface PostLoggedInAction { + + /** + * Perform some operations after the user login. 
+ * + * @param context the DSpace context + */ + public void loggedIn(Context context); +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/impl/ResearcherProfileAutomaticClaim.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/impl/ResearcherProfileAutomaticClaim.java new file mode 100644 index 0000000000..4d7242342d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/login/impl/ResearcherProfileAutomaticClaim.java @@ -0,0 +1,141 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.login.impl; + +import static org.apache.commons.collections4.IteratorUtils.toList; +import static org.apache.commons.lang3.StringUtils.isBlank; +import static org.dspace.content.authority.Choices.CF_ACCEPTED; + +import java.sql.SQLException; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.profile.service.ResearcherProfileService; +import org.dspace.app.rest.login.PostLoggedInAction; +import org.dspace.authorize.AuthorizeException; +import org.dspace.content.Item; +import org.dspace.content.MetadataFieldName; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.eperson.service.EPersonService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.Assert; + +/** + * Implementation of {@link PostLoggedInAction} that perform an automatic claim + * between the logged eperson and possible profiles without eperson present in + * the system. 
This pairing between eperson and profile is done starting from + * the configured metadata of the logged in user. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfileAutomaticClaim implements PostLoggedInAction { + + private final static Logger LOGGER = LoggerFactory.getLogger(ResearcherProfileAutomaticClaim.class); + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Autowired + private ItemService itemService; + + @Autowired + private EPersonService ePersonService; + + /** + * The field of the eperson to search for. + */ + private final String ePersonField; + + /** + * The field of the profile item to search. + */ + private final String profileField; + + public ResearcherProfileAutomaticClaim(String ePersonField, String profileField) { + Assert.notNull(ePersonField, "An eperson field is required to perform automatic claim"); + Assert.notNull(profileField, "An profile field is required to perform automatic claim"); + this.ePersonField = ePersonField; + this.profileField = profileField; + } + + @Override + public void loggedIn(Context context) { + + if (isBlank(researcherProfileService.getProfileType())) { + return; + } + + EPerson currentUser = context.getCurrentUser(); + if (currentUser == null) { + return; + } + + try { + claimProfile(context, currentUser); + } catch (SQLException | AuthorizeException e) { + LOGGER.error("An error occurs during the profile claim by email", e); + } + + } + + private void claimProfile(Context context, EPerson currentUser) throws SQLException, AuthorizeException { + + UUID id = currentUser.getID(); + String fullName = currentUser.getFullName(); + + if (currentUserHasAlreadyResearcherProfile(context)) { + return; + } + + Item item = findClaimableProfile(context, currentUser); + if (item != null) { + itemService.addMetadata(context, item, "dspace", "object", "owner", + null, fullName, id.toString(), CF_ACCEPTED); + } + + } + + private boolean 
currentUserHasAlreadyResearcherProfile(Context context) throws SQLException, AuthorizeException { + return researcherProfileService.findById(context, context.getCurrentUser().getID()) != null; + } + + private Item findClaimableProfile(Context context, EPerson currentUser) throws SQLException, AuthorizeException { + + String value = getValueToSearchFor(context, currentUser); + if (StringUtils.isEmpty(value)) { + return null; + } + + List items = toList(itemService.findArchivedByMetadataField(context, profileField, value)).stream() + .filter(this::hasNotOwner) + .filter(researcherProfileService::hasProfileType) + .collect(Collectors.toList()); + + return items.size() == 1 ? items.get(0) : null; + } + + private String getValueToSearchFor(Context context, EPerson currentUser) { + if ("email".equals(ePersonField)) { + return currentUser.getEmail(); + } + return ePersonService.getMetadataFirstValue(currentUser, new MetadataFieldName(ePersonField), Item.ANY); + } + + private boolean hasNotOwner(Item item) { + return CollectionUtils.isEmpty(itemService.getMetadata(item, "dspace", "object", "owner", Item.ANY)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AccessStatusRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AccessStatusRest.java new file mode 100644 index 0000000000..c7dc2d1198 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AccessStatusRest.java @@ -0,0 +1,49 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonProperty.Access; + +/** + * The Access Status REST Resource. 
+ */ +public class AccessStatusRest implements RestModel { + public static final String NAME = "accessStatus"; + + String status; + + @Override + @JsonProperty(access = Access.READ_ONLY) + public String getType() { + return NAME; + } + + @Override + @JsonIgnore + public String getTypePlural() { + return getType(); + } + + public AccessStatusRest() { + setStatus(null); + } + + public AccessStatusRest(String status) { + setStatus(status); + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AuthenticationStatusRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AuthenticationStatusRest.java index 81a59bbd69..784c06e059 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AuthenticationStatusRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/AuthenticationStatusRest.java @@ -7,9 +7,12 @@ */ package org.dspace.app.rest.model; +import java.util.List; + import com.fasterxml.jackson.annotation.JsonIgnore; import org.dspace.app.rest.RestResourceController; + /** * Find out your authentication status. 
*/ @@ -18,7 +21,11 @@ public class AuthenticationStatusRest extends BaseObjectRest { private boolean authenticated; private String authenticationMethod; + private EPersonRest ePersonRest; + private List specialGroups; + public static final String NAME = "status"; + public static final String SPECIALGROUPS = "specialGroups"; public static final String CATEGORY = RestAddressableModel.AUTHENTICATION; @Override @@ -41,9 +48,6 @@ public class AuthenticationStatusRest extends BaseObjectRest { return RestResourceController.class; } - - private EPersonRest ePersonRest; - public AuthenticationStatusRest() { setOkay(true); setAuthenticated(false); @@ -90,4 +94,14 @@ public class AuthenticationStatusRest extends BaseObjectRest { public void setAuthenticationMethod(final String authenticationMethod) { this.authenticationMethod = authenticationMethod; } + + public void setSpecialGroups(List groupList) { + this.specialGroups = groupList; + } + + @LinkRest(name = "specialGroups") + @JsonIgnore + public List getSpecialGroups() { + return specialGroups; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ItemRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ItemRest.java index 5897f73944..63004b68d2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ItemRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ItemRest.java @@ -17,6 +17,10 @@ import com.fasterxml.jackson.annotation.JsonProperty; * @author Andrea Bollini (andrea.bollini at 4science.it) */ @LinksRest(links = { + @LinkRest( + name = ItemRest.ACCESS_STATUS, + method = "getAccessStatus" + ), @LinkRest( name = ItemRest.BUNDLES, method = "getBundles" @@ -51,6 +55,7 @@ public class ItemRest extends DSpaceObjectRest { public static final String PLURAL_NAME = "items"; public static final String CATEGORY = RestAddressableModel.CORE; + public static final String ACCESS_STATUS = "accessStatus"; public static final String BUNDLES = 
"bundles"; public static final String MAPPED_COLLECTIONS = "mappedCollections"; public static final String OWNING_COLLECTION = "owningCollection"; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RelationshipRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RelationshipRest.java index e1aeb3ff6f..dd35a0726e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RelationshipRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/RelationshipRest.java @@ -18,10 +18,18 @@ import org.dspace.app.rest.RestResourceController; * This class acts as a data holder for the RelationshipResource * Refer to {@link org.dspace.content.Relationship} for explanation about the properties */ +@LinksRest(links = { + @LinkRest( + name = RelationshipRest.RELATIONSHIP_TYPE, + method = "getRelationshipType" + ) +}) public class RelationshipRest extends BaseObjectRest { public static final String NAME = "relationship"; public static final String CATEGORY = "core"; + public static final String RELATIONSHIP_TYPE = "relationshipType"; + @JsonIgnore private UUID leftId; @JsonIgnore diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ResearcherProfileRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ResearcherProfileRest.java new file mode 100644 index 0000000000..ef145d5e71 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/ResearcherProfileRest.java @@ -0,0 +1,57 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model; + +import java.util.UUID; + +import org.dspace.app.rest.RestResourceController; + +/** + * The Researcher Profile REST resource. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@LinksRest(links = { + @LinkRest(name = ResearcherProfileRest.ITEM, method = "getItem"), + @LinkRest(name = ResearcherProfileRest.EPERSON, method = "getEPerson") +}) +public class ResearcherProfileRest extends BaseObjectRest { + + private static final long serialVersionUID = 1L; + public static final String CATEGORY = RestModel.EPERSON; + public static final String NAME = "profile"; + + public static final String ITEM = "item"; + public static final String EPERSON = "eperson"; + + private boolean visible; + + public boolean isVisible() { + return visible; + } + + public void setVisible(boolean visible) { + this.visible = visible; + } + + @Override + public String getType() { + return NAME; + } + + @Override + public String getCategory() { + return CATEGORY; + } + + @Override + public Class getController() { + return RestResourceController.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java index eb724ecbdb..dbf61c06bc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SubmissionFormFieldRest.java @@ -83,6 +83,11 @@ public class SubmissionFormFieldRest { */ private List languageCodes; + /** + * The list of type bind value + */ + private List typeBind; + /** * Getter for {@link #selectableMetadata} * @@ -266,6 +271,14 @@ public class SubmissionFormFieldRest { } } + public List getTypeBind() { + return typeBind; + } + + public void setTypeBind(List typeBind) { + this.typeBind = typeBind; + } + public SelectableRelationship getSelectableRelationship() { return selectableRelationship; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/AccessStatusResource.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/AccessStatusResource.java new file mode 100644 index 0000000000..c5cd2a5aee --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/AccessStatusResource.java @@ -0,0 +1,31 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model.hateoas; + +import com.fasterxml.jackson.annotation.JsonUnwrapped; +import org.dspace.app.rest.model.AccessStatusRest; +import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; + +/** + * Access Status Rest HAL Resource. The HAL Resource wraps the REST Resource + * adding support for the links and embedded resources + */ +@RelNameDSpaceResource(AccessStatusRest.NAME) +public class AccessStatusResource extends HALResource { + + @JsonUnwrapped + private AccessStatusRest data; + + public AccessStatusResource(AccessStatusRest entry) { + super(entry); + } + + public AccessStatusRest getData() { + return data; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/ResearcherProfileResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/ResearcherProfileResource.java new file mode 100644 index 0000000000..3b034c1506 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/ResearcherProfileResource.java @@ -0,0 +1,29 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model.hateoas; + +import org.dspace.app.rest.model.ResearcherProfileRest; +import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; +import 
org.dspace.app.rest.utils.Utils; + +/** + * This class serves as a wrapper class to wrap the ResearcherProfileRest into + * a HAL resource. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RelNameDSpaceResource(ResearcherProfileRest.NAME) +public class ResearcherProfileResource extends DSpaceResource { + + public ResearcherProfileResource(ResearcherProfileRest data, Utils utils) { + super(data, utils); + } + + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/step/SherpaPolicy.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/step/SherpaPolicy.java new file mode 100644 index 0000000000..1f6bd07c2a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/step/SherpaPolicy.java @@ -0,0 +1,48 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model.step; + +import java.util.Date; + +import org.dspace.app.sherpa.v2.SHERPAResponse; + +/** + * Java Bean to expose Sherpa policies during in progress submission. 
+ * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SherpaPolicy implements SectionData { + + private static final long serialVersionUID = 2440249335255683173L; + + private Date retrievalTime; + + private SHERPAResponse sherpaResponse; + + public Date getRetrievalTime() { + return retrievalTime; + } + + public void setRetrievalTime(Date retrievalTime) { + this.retrievalTime = retrievalTime; + } + + public SHERPAResponse getSherpaResponse() { + return sherpaResponse; + } + + /** + * Setting a sherpaResponse will automatically set the retrievalTime + * of the section copying the value from the response if not null + */ + public void setSherpaResponse(SHERPAResponse sherpaResponse) { + this.sherpaResponse = sherpaResponse; + this.retrievalTime = sherpaResponse != null ? sherpaResponse.getRetrievalTime() : null; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CollectionRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CollectionRestRepository.java index 4a769709d3..ba3163a444 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CollectionRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CollectionRestRepository.java @@ -24,6 +24,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.BitstreamRest; @@ -695,8 +696,8 @@ public class CollectionRestRepository extends DSpaceObjectRestRepository { + + public static final String NO_VISIBILITY_CHANGE_MSG = "Refused to perform the Researcher Profile patch based " + + 
"on a token without changing the visibility"; + + @Autowired + private ResearcherProfileService researcherProfileService; + + @Autowired + private DSpacePermissionEvaluator permissionEvaluator; + + @Autowired + private EPersonService ePersonService; + + @Autowired + private ResourcePatch resourcePatch; + + @Override + @PreAuthorize("hasPermission(#id, 'PROFILE', 'READ')") + public ResearcherProfileRest findOne(Context context, UUID id) { + try { + ResearcherProfile profile = researcherProfileService.findById(context, id); + if (profile == null) { + return null; + } + return converter.toRest(profile, utils.obtainProjection()); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Create a new researcher profile from scratch. + */ + @Override + @PreAuthorize("isAuthenticated()") + protected ResearcherProfileRest createAndReturn(Context context) throws AuthorizeException, SQLException { + + UUID id = getEPersonIdFromRequest(context); + if (isNotAuthorized(id, "WRITE")) { + throw new AuthorizeException("User unauthorized to create a new profile for user " + id); + } + + EPerson ePerson = ePersonService.find(context, id); + if (ePerson == null) { + throw new UnprocessableEntityException("No EPerson exists with id: " + id); + } + + try { + ResearcherProfile newProfile = researcherProfileService.createAndReturn(context, ePerson); + return converter.toRest(newProfile, utils.obtainProjection()); + } catch (SearchServiceException e) { + throw new RuntimeException(e.getMessage(), e); + } + + } + + /** + * Create a new researcher profile claiming an already existing item. 
+ */ + @Override + protected ResearcherProfileRest createAndReturn(final Context context, final List list) + throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException { + if (CollectionUtils.isEmpty(list) || list.size() > 1) { + throw new IllegalArgumentException("Uri list must contain exactly one element"); + } + + + UUID id = getEPersonIdFromRequest(context); + if (isNotAuthorized(id, "WRITE")) { + throw new AuthorizeException("User unauthorized to create a new profile for user " + id); + } + + EPerson ePerson = ePersonService.find(context, id); + if (ePerson == null) { + throw new UnprocessableEntityException("No EPerson exists with id: " + id); + } + + try { + ResearcherProfile newProfile = researcherProfileService + .claim(context, ePerson, URI.create(list.get(0))); + return converter.toRest(newProfile, utils.obtainProjection()); + } catch (SearchServiceException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + @Override + public Page findAll(Context context, Pageable pageable) { + throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); + } + + @Override + @PreAuthorize("hasPermission(#id, 'PROFILE', 'DELETE')") + protected void delete(Context context, UUID id) { + try { + researcherProfileService.deleteById(context, id); + } catch (SQLException | AuthorizeException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + @Override + @PreAuthorize("hasPermission(#id, 'PROFILE', #patch)") + protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, + UUID id, Patch patch) throws SQLException, AuthorizeException { + + ResearcherProfile profile = researcherProfileService.findById(context, id); + if (profile == null) { + throw new ResourceNotFoundException(apiCategory + "." 
+ model + " with id: " + id + " not found"); + } + + resourcePatch.patch(context, profile, patch.getOperations()); + + } + + @Override + public Class getDomainClass() { + return ResearcherProfileRest.class; + } + + + private UUID getEPersonIdFromRequest(Context context) { + HttpServletRequest request = getRequestService().getCurrentRequest().getHttpServletRequest(); + + String ePersonId = request.getParameter("eperson"); + if (ePersonId == null) { + return context.getCurrentUser().getID(); + } + + UUID uuid = UUIDUtils.fromString(ePersonId); + if (uuid == null) { + throw new DSpaceBadRequestException("The provided eperson parameter is not a valid uuid"); + } + return uuid; + } + + private boolean isNotAuthorized(UUID id, String permission) { + Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); + return !permissionEvaluator.hasPermission(authentication, id, "PROFILE", permission); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/service/UriListHandlerService.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/service/UriListHandlerService.java index 30887fd4b1..5366428b1c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/service/UriListHandlerService.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/handler/service/UriListHandlerService.java @@ -11,6 +11,8 @@ import java.sql.SQLException; import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.repository.handler.UriListHandler; import org.dspace.authorize.AuthorizeException; @@ -18,6 +20,7 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; + /** * This class is 
a wrapper Service class for the {@link UriListHandler} objects. It will find the right one and try to * execute it for the given arguments @@ -25,6 +28,8 @@ import org.springframework.stereotype.Component; @Component public class UriListHandlerService { + private final static Logger log = LogManager.getLogger(); + @Autowired private List uriListHandlers; @@ -60,4 +65,5 @@ public class UriListHandlerService { throw new DSpaceBadRequestException("No UriListHandler was found that supports the inputs given"); } -} + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ResearcherProfileVisibleReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ResearcherProfileVisibleReplaceOperation.java new file mode 100644 index 0000000000..4052fe5395 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/ResearcherProfileVisibleReplaceOperation.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation; + +import java.sql.SQLException; + +import org.dspace.app.profile.ResearcherProfile; +import org.dspace.app.profile.service.ResearcherProfileService; +import org.dspace.app.rest.exception.RESTAuthorizationException; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Implementation for ResearcherProfile visibility patches. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/eperson/profiles/<:id-eperson> -H " + * Content-Type: application/json" -d '[{ "op": "replace", "path": " + * /visible", "value": true}]' + * + */ +@Component +public class ResearcherProfileVisibleReplaceOperation extends PatchOperation { + + @Autowired + private ResearcherProfileService researcherProfileService; + + /** + * Path in json body of patch that uses this operation. + */ + public static final String OPERATION_VISIBLE_CHANGE = "/visible"; + + @Override + public ResearcherProfile perform(Context context, ResearcherProfile profile, Operation operation) + throws SQLException { + + Object value = operation.getValue(); + if (value == null || !(value instanceof Boolean)) { + throw new UnprocessableEntityException("The /visible value must be a boolean (true|false)"); + } + + try { + researcherProfileService.changeVisibility(context, profile, (boolean) value); + } catch (AuthorizeException e) { + throw new RESTAuthorizationException("Unauthorized user for profile visibility change"); + } + + return profile; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof ResearcherProfile + && operation.getOp().trim().equalsIgnoreCase(OPERATION_REPLACE) + && operation.getPath().trim().equalsIgnoreCase(OPERATION_VISIBLE_CHANGE)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyActionAddOrReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyActionAddOrReplaceOperation.java new file mode 100644 index 0000000000..2227c4e075 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyActionAddOrReplaceOperation.java @@ -0,0 +1,70 @@ +/** + * The contents of this file are subject to the license and copyright + * 
detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation.resourcePolicy; + +import static org.dspace.app.rest.repository.patch.operation.resourcePolicy.ResourcePolicyUtils.OPERATION_PATH_ACTION; + +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.exception.UnprocessableEntityException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.repository.patch.operation.PatchOperation; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.core.Constants; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Implementation for ResourcePolicy action ADD or REPLACE patch. + * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/authz/resourcepolicies/<:id-resourcepolicy> -H " + * Content-Type: application/json" -d '[{ "op": "replace", "path": " + * /action", "value": 2]' + * + * + * @author Emanuele Ballarini (emanuele.ballarini@4science.com) + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@Component +public class ResourcePolicyActionAddOrReplaceOperation extends PatchOperation { + + @Override + public R perform(Context context, R resource, Operation operation) { + checkOperationValue(operation.getValue()); + if (this.supports(resource, operation)) { + ResourcePolicy resourcePolicy = (ResourcePolicy) resource; + this.replace(resourcePolicy, operation); + return resource; + } else { + throw new DSpaceBadRequestException(this.getClass() + " does not support this operation"); + } + } + + /** + * Performs the actual add or replace action of resourcePolicy operation. Both + * actions are allowed since the starting value of action is a defined int. 
+ * + * @param resourcePolicy resourcePolicy being patched + * @param operation patch operation + */ + private void replace(ResourcePolicy resourcePolicy, Operation operation) { + int action = Constants.getActionID(operation.getValue().toString()); + if (action < 0 || action >= Constants.actionText.length) { + throw new UnprocessableEntityException(action + " is not defined"); + } + resourcePolicy.setAction(action); + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof ResourcePolicy + && (operation.getOp().trim().equalsIgnoreCase(OPERATION_ADD) + || operation.getOp().trim().equalsIgnoreCase(OPERATION_REPLACE)) + && operation.getPath().trim().equalsIgnoreCase(OPERATION_PATH_ACTION)); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeAddOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeAddOperation.java new file mode 100644 index 0000000000..afdcef6656 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeAddOperation.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation.resourcePolicy; + +import static org.dspace.app.rest.repository.patch.operation.resourcePolicy.ResourcePolicyUtils.OPERATION_PATH_POLICY_TYPE; + +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.repository.patch.operation.PatchOperation; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.core.Context; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * Implementation for ResourcePolicy policyType ADD patch. + * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/authz/resourcepolicies/<:id-resourcepolicy> -H " + * Content-Type: application/json" -d '[{ "op": "add", "path": " + * /policyType", "value": "TYPE_SUBMISSION"]' + * + * + * @author Emanuele Ballarini (emanuele.ballarini@4science.com) + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@Component +public class ResourcePolicyPolicyTypeAddOperation extends PatchOperation { + + @Autowired + ResourcePolicyUtils resourcePolicyUtils; + + @Override + public R perform(Context context, R resource, Operation operation) { + checkOperationValue(operation.getValue()); + if (this.supports(resource, operation)) { + ResourcePolicy resourcePolicy = (ResourcePolicy) resource; + this.checkResourcePolicyForNonExistingPolicyTypeValue(resourcePolicy); + this.add(resourcePolicy, operation); + return resource; + } else { + throw new DSpaceBadRequestException(this.getClass() + " does not support this operation"); + } + } + + /** + * Performs the actual add policyType of resourcePolicy operation + * + * @param resourcePolicy resourcePolicy being patched + * @param operation patch operation + */ + private void add(ResourcePolicy resourcePolicy, Operation operation) { + String policyType = (String) operation.getValue(); + resourcePolicy.setRpType(policyType); + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof ResourcePolicy && operation.getOp().trim().equalsIgnoreCase(OPERATION_ADD) + && operation.getPath().trim().equalsIgnoreCase(OPERATION_PATH_POLICY_TYPE)); + } + + /** + * Throws DSpaceBadRequestException if a value is already set in the /policyType + * path. 
+ * + * @param resource the resource to update + * + */ + void checkResourcePolicyForNonExistingPolicyTypeValue(ResourcePolicy resource) { + if (resource.getRpType() != null) { + throw new DSpaceBadRequestException("Attempting to add a value to an already existing path."); + } + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeRemoveOperation.java new file mode 100644 index 0000000000..e4f661781d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeRemoveOperation.java @@ -0,0 +1,73 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation.resourcePolicy; + +import static org.dspace.app.rest.repository.patch.operation.resourcePolicy.ResourcePolicyUtils.OPERATION_PATH_POLICY_TYPE; + +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.repository.patch.operation.PatchOperation; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Implementation for ResourcePolicy policyType DELETE patch. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/authz/resourcepolicies/<:id-resourcepolicy> -H " + * Content-Type: application/json" -d '[{ "op": "remove", "path": " + * /policyType"]' + * + * + * @author Emanuele Ballarini (emanuele.ballarini@4science.com) + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@Component +public class ResourcePolicyPolicyTypeRemoveOperation extends PatchOperation { + + @Override + public R perform(Context context, R resource, Operation operation) { + if (this.supports(resource, operation)) { + ResourcePolicy resourcePolicy = (ResourcePolicy) resource; + this.checkResourcePolicyForExistingPolicyTypeValue(resourcePolicy, operation); + this.delete(resourcePolicy); + return resource; + } else { + throw new DSpaceBadRequestException(this.getClass() + " does not support this operation"); + } + } + + /** + * Performs the actual delete policyType of resourcePolicy operation + * + * @param resourcePolicy resourcePolicy being patched + */ + private void delete(ResourcePolicy resourcePolicy) { + resourcePolicy.setRpType(null); + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof ResourcePolicy && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) + && operation.getPath().trim().equalsIgnoreCase(OPERATION_PATH_POLICY_TYPE)); + } + + /** + * Throws DSpaceBadRequestException if attempting to delete a non-existent value + * in /policyType path. 
+ * + * @param resource the resource to update + */ + void checkResourcePolicyForExistingPolicyTypeValue(ResourcePolicy resource, Operation operation) { + if (resource.getRpType() == null) { + throw new DSpaceBadRequestException( + "Attempting to " + operation.getOp() + " a non-existent policyType value."); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeReplaceOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeReplaceOperation.java new file mode 100644 index 0000000000..8606db181d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyPolicyTypeReplaceOperation.java @@ -0,0 +1,77 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository.patch.operation.resourcePolicy; + +import static org.dspace.app.rest.repository.patch.operation.resourcePolicy.ResourcePolicyUtils.OPERATION_PATH_POLICY_TYPE; + +import org.dspace.app.rest.exception.DSpaceBadRequestException; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.repository.patch.operation.PatchOperation; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.core.Context; +import org.springframework.stereotype.Component; + +/** + * Implementation for ResourcePolicy policyType REPLACE patch. 
+ * + * Example: + * curl -X PATCH http://${dspace.server.url}/api/authz/resourcepolicies/<:id-resourcepolicy> -H " + * Content-Type: application/json" -d '[{ "op": "replace", "path": " + * /policyType", "value": "TYPE_CUSTOM"}]' + * + * + * @author Emanuele Ballarini (emanuele.ballarini@4science.com) + * @author Mykhaylo Boychuk (mykhaylo.boychuk@4science.com) + */ +@Component +public class ResourcePolicyPolicyTypeReplaceOperation extends PatchOperation { + + @Override + public R perform(Context context, R resource, Operation operation) { + checkOperationValue(operation.getValue()); + if (this.supports(resource, operation)) { + ResourcePolicy resourcePolicy = (ResourcePolicy) resource; + this.checkResourcePolicyForExistingPolicyTypeValue(resourcePolicy, operation); + this.replace(resourcePolicy, operation); + return resource; + } else { + throw new DSpaceBadRequestException(this.getClass() + " does not support this operation"); + } + } + + /** + * Performs the actual replace policyType of resourcePolicy operation + * + * @param resourcePolicy resourcePolicy being patched + * @param operation patch operation + */ + private void replace(ResourcePolicy resourcePolicy, Operation operation) { + String newPolicyType = (String) operation.getValue(); + resourcePolicy.setRpType(newPolicyType); + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return (objectToMatch instanceof ResourcePolicy && operation.getOp().trim().equalsIgnoreCase(OPERATION_REPLACE) + && operation.getPath().trim().equalsIgnoreCase(OPERATION_PATH_POLICY_TYPE)); + } + + /** + * Throws DSpaceBadRequestException when attempting to replace a non-existent + * value in /policyType path + * + * @param resource + * @param operation + */ + private void checkResourcePolicyForExistingPolicyTypeValue(ResourcePolicy resource, Operation operation) { + if (resource.getRpType() == null) { + throw new DSpaceBadRequestException( + "Attempting to " + operation.getOp() + " a 
non-existent policyType value."); + } + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java index d58c21093f..435480e318 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/resourcePolicy/ResourcePolicyUtils.java @@ -34,6 +34,8 @@ public class ResourcePolicyUtils { public static final String OPERATION_PATH_ENDDATE = "/endDate"; public static final String OPERATION_PATH_DESCRIPTION = "/description"; public static final String OPERATION_PATH_NAME = "/name"; + public static final String OPERATION_PATH_POLICY_TYPE = "/policyType"; + public static final String OPERATION_PATH_ACTION = "/action"; /** * Throws PatchBadRequestException for missing value in the /startDate path. 
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/EPersonRestAuthenticationProvider.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/EPersonRestAuthenticationProvider.java index 20844ec946..e55734e513 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/EPersonRestAuthenticationProvider.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/EPersonRestAuthenticationProvider.java @@ -11,14 +11,16 @@ import static org.dspace.app.rest.security.WebSecurityConfiguration.ADMIN_GRANT; import static org.dspace.app.rest.security.WebSecurityConfiguration.AUTHENTICATED_GRANT; import java.sql.SQLException; +import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Objects; +import javax.annotation.PostConstruct; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.login.PostLoggedInAction; import org.dspace.app.rest.utils.ContextUtil; -import org.dspace.app.util.AuthorizeUtil; import org.dspace.authenticate.AuthenticationMethod; import org.dspace.authenticate.service.AuthenticationService; import org.dspace.authorize.service.AuthorizeService; @@ -62,6 +64,16 @@ public class EPersonRestAuthenticationProvider implements AuthenticationProvider @Autowired private HttpServletRequest request; + @Autowired(required = false) + private List postLoggedInActions; + + @PostConstruct + public void postConstruct() { + if (postLoggedInActions == null) { + postLoggedInActions = Collections.emptyList(); + } + } + @Override public Authentication authenticate(Authentication authentication) throws AuthenticationException { Context context = ContextUtil.obtainContext(request); @@ -122,6 +134,15 @@ public class EPersonRestAuthenticationProvider implements AuthenticationProvider .getHeader(newContext, "login", "type=explicit")); output = createAuthentication(newContext); + + for (PostLoggedInAction 
action : postLoggedInActions) { + try { + action.loggedIn(newContext); + } catch (Exception ex) { + log.error("An error occurs performing post logged in action", ex); + } + } + } else { log.info(LogHelper.getHeader(newContext, "failed_login", "email=" + name + ", result=" @@ -176,20 +197,15 @@ public class EPersonRestAuthenticationProvider implements AuthenticationProvider EPerson eperson = context.getCurrentUser(); if (eperson != null) { boolean isAdmin = false; - boolean isCommunityAdmin = false; - boolean isCollectionAdmin = false; try { isAdmin = authorizeService.isAdmin(context, eperson); - isCommunityAdmin = authorizeService.isCommunityAdmin(context); - isCollectionAdmin = authorizeService.isCollectionAdmin(context); } catch (SQLException e) { log.error("SQL error while checking for admin rights", e); } if (isAdmin) { authorities.add(new SimpleGrantedAuthority(ADMIN_GRANT)); - } else if ((isCommunityAdmin && AuthorizeUtil.canCommunityAdminManageAccounts()) - || (isCollectionAdmin && AuthorizeUtil.canCollectionAdminManageAccounts())) { + } else if (authorizeService.isAccountManager(context)) { authorities.add(new SimpleGrantedAuthority(MANAGE_ACCESS_GROUP)); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/GroupRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/GroupRestPermissionEvaluatorPlugin.java index 6f168efc91..d2675399d8 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/GroupRestPermissionEvaluatorPlugin.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/GroupRestPermissionEvaluatorPlugin.java @@ -68,6 +68,11 @@ public class GroupRestPermissionEvaluatorPlugin extends RestObjectPermissionEval Group group = groupService.find(context, dsoId); + // if the group is one of the special groups of the context it is readable + if (context.getSpecialGroups().contains(group)) { + return true; + } + // anonymous user if (ePerson == 
null) { return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/ResearcherProfileRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/ResearcherProfileRestPermissionEvaluatorPlugin.java new file mode 100644 index 0000000000..66cc873db2 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/ResearcherProfileRestPermissionEvaluatorPlugin.java @@ -0,0 +1,76 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.security; + +import static org.dspace.app.rest.security.DSpaceRestPermission.DELETE; +import static org.dspace.app.rest.security.DSpaceRestPermission.READ; +import static org.dspace.app.rest.security.DSpaceRestPermission.WRITE; + +import java.io.Serializable; +import java.util.UUID; +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.model.ResearcherProfileRest; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.dspace.util.UUIDUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +/** + * + * An authenticated user is allowed to view, update or delete his or her own + * data. This {@link RestPermissionEvaluatorPlugin} implements that requirement. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Component +public class ResearcherProfileRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { + + @Autowired + private RequestService requestService; + + @Override + public boolean hasDSpacePermission(Authentication authentication, Serializable targetId, String targetType, + DSpaceRestPermission restPermission) { + + if (!READ.equals(restPermission) && !WRITE.equals(restPermission) && !DELETE.equals(restPermission)) { + return false; + } + + if (!StringUtils.equalsIgnoreCase(targetType, ResearcherProfileRest.NAME)) { + return false; + } + + UUID id = UUIDUtils.fromString(targetId.toString()); + if (id == null) { + return false; + } + + Request request = requestService.getCurrentRequest(); + Context context = ContextUtil.obtainContext((HttpServletRequest) request.getServletRequest()); + + EPerson currentUser = context.getCurrentUser(); + if (currentUser == null) { + return false; + } + + if (id.equals(currentUser.getID())) { + return true; + } + + return false; + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java index 23e6356216..afaf84d086 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/WebSecurityConfiguration.java @@ -11,6 +11,7 @@ import org.dspace.app.rest.exception.DSpaceAccessDeniedHandler; import org.dspace.authenticate.service.AuthenticationService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.security.SecurityProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import 
org.springframework.context.annotation.Bean; @@ -65,6 +66,9 @@ public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter { @Autowired private DSpaceAccessDeniedHandler accessDeniedHandler; + @Value("${management.endpoints.web.base-path:/actuator}") + private String actuatorBasePath; + @Override public void configure(WebSecurity webSecurity) throws Exception { // Define URL patterns which Spring Security will ignore entirely. @@ -83,7 +87,7 @@ public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter { // Configure authentication requirements for ${dspace.server.url}/api/ URL only // NOTE: REST API is hardcoded to respond on /api/. Other modules (OAI, SWORD, IIIF, etc) use other root paths. http.requestMatchers() - .antMatchers("/api/**", "/iiif/**") + .antMatchers("/api/**", "/iiif/**", actuatorBasePath + "/**") .and() // Enable Spring Security authorization on these paths .authorizeRequests() @@ -91,6 +95,7 @@ public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter { .antMatchers(HttpMethod.POST,"/api/authn/login").permitAll() // Everyone can call GET on the status endpoint (used to check your authentication status) .antMatchers(HttpMethod.GET, "/api/authn/status").permitAll() + .antMatchers(HttpMethod.GET, actuatorBasePath + "/info").hasAnyAuthority(ADMIN_GRANT) .and() // Tell Spring to not create Sessions .sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS).and() diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/AbstractProcessingStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/AbstractProcessingStep.java index 9d7ea86db1..8c03f4ef82 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/AbstractProcessingStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/AbstractProcessingStep.java @@ -35,5 +35,4 @@ public abstract class AbstractProcessingStep implements DataProcessingStep { protected 
MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService(); protected ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); protected WorkspaceItemService workspaceItemService = ContentServiceFactory.getInstance().getWorkspaceItemService(); - } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyAddPatchOperation.java index 5bbb0f0da5..5e6274d78f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyAddPatchOperation.java @@ -8,8 +8,6 @@ package org.dspace.app.rest.submit.factory.impl; import java.util.ArrayList; -import java.util.Collection; -import java.util.Iterator; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -49,31 +47,31 @@ public class BitstreamResourcePolicyAddPatchOperation extends AddPatchOperation< @Override void add(Context context, HttpServletRequest currentRequest, InProgressSubmission source, String path, Object value) throws Exception { + //"absolutePath": "files/0/accessConditions" //"path": "/sections/upload/files/0/accessConditions" - String[] split = getAbsolutePath(path).split("/"); + String[] splitAbsPath = getAbsolutePath(path).split("/"); + String[] splitPath = path.split("/"); Item item = source.getItem(); List bundle = itemService.getBundles(item, Constants.CONTENT_BUNDLE_NAME); ; - Collection uploadConfigsCollection = uploadConfigurationService.getMap().values(); - Iterator uploadConfigs = uploadConfigsCollection.iterator(); + UploadConfiguration uploadConfig = uploadConfigurationService.getMap().get(splitPath[2]); for (Bundle bb : bundle) { int idx = 0; 
for (Bitstream bitstream : bb.getBitstreams()) { - if (idx == Integer.parseInt(split[1])) { + if (idx == Integer.parseInt(splitAbsPath[1])) { List newAccessConditions = new ArrayList(); - if (split.length == 3) { + if (splitAbsPath.length == 3) { resourcePolicyService.removePolicies(context, bitstream, ResourcePolicy.TYPE_CUSTOM); newAccessConditions = evaluateArrayObject((LateObjectEvaluator) value); - } else if (split.length == 4) { + } else if (splitAbsPath.length == 4) { // contains "-", call index-based accessConditions it make not sense newAccessConditions.add(evaluateSingleObject((LateObjectEvaluator) value)); } - // TODO manage duplicate policy if (CollectionUtils.isNotEmpty(newAccessConditions)) { - BitstreamResourcePolicyUtils.findApplyResourcePolicy(context, uploadConfigs, bitstream, + BitstreamResourcePolicyUtils.findApplyResourcePolicy(context, uploadConfig, bitstream, newAccessConditions); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyUtils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyUtils.java index 3aa46fb8c8..44e5c9b7d9 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyUtils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/BitstreamResourcePolicyUtils.java @@ -10,9 +10,9 @@ package org.dspace.app.rest.submit.factory.impl; import java.sql.SQLException; import java.text.ParseException; import java.util.Date; -import java.util.Iterator; import java.util.List; +import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.model.AccessConditionDTO; import org.dspace.authorize.AuthorizeException; import org.dspace.content.DSpaceObject; @@ -37,27 +37,26 @@ public class BitstreamResourcePolicyUtils { * This function applies the resource policies. * * @param context The relevant DSpace Context. 
- * @param uploadConfigs The configured UploadConfigurations + * @param uploadConfig The configured UploadConfiguration * @param obj The applicable DSpace object whose policies should be determined * @param newAccessCondition The access condition containing the details for the desired policies * @throws SQLException If a database error occurs * @throws AuthorizeException If the user is not authorized + * @throws ParseException If parse error */ - public static void findApplyResourcePolicy(Context context, Iterator uploadConfigs, + public static void findApplyResourcePolicy(Context context, UploadConfiguration uploadConfiguration, DSpaceObject obj, List newAccessConditions) throws SQLException, AuthorizeException, ParseException { - while (uploadConfigs.hasNext()) { - UploadConfiguration uploadConfiguration = uploadConfigs.next(); - for (AccessConditionDTO newAccessCondition : newAccessConditions) { - String name = newAccessCondition.getName(); - String description = newAccessCondition.getDescription(); + for (AccessConditionDTO newAccessCondition : newAccessConditions) { + String name = newAccessCondition.getName(); + String description = newAccessCondition.getDescription(); - Date startDate = newAccessCondition.getStartDate(); - Date endDate = newAccessCondition.getEndDate(); + Date startDate = newAccessCondition.getStartDate(); + Date endDate = newAccessCondition.getEndDate(); - findApplyResourcePolicy(context, uploadConfiguration, obj, name, description, startDate, endDate); - } + findApplyResourcePolicy(context, uploadConfiguration, obj, name, description, startDate, endDate); } + } /** @@ -66,7 +65,7 @@ public class BitstreamResourcePolicyUtils { * The description, start date and end date are applied as well * * @param context The relevant DSpace Context. 
- * @param uploadConfigs The configured UploadConfigurations + * @param uploadConfiguration The configured UploadConfiguration * @param obj The applicable DSpace object whose policies should be determined * @param name The name of the access condition matching the desired policies * @param description An optional description for the policies @@ -74,16 +73,24 @@ public class BitstreamResourcePolicyUtils { * @param endDate An optional end date for the policies * @throws SQLException If a database error occurs * @throws AuthorizeException If the user is not authorized + * @throws ParseException If parse error */ public static void findApplyResourcePolicy(Context context, UploadConfiguration uploadConfiguration, DSpaceObject obj, String name, String description, Date startDate, Date endDate) throws SQLException, AuthorizeException, ParseException { + boolean found = false; for (AccessConditionOption aco : uploadConfiguration.getOptions()) { if (aco.getName().equalsIgnoreCase(name)) { aco.createResourcePolicy(context, obj, name, description, startDate, endDate); - return; + found = true; + break; } } + // unexisting/unconfigured access conditions are no longer accepted + if (!found) { + throw new UnprocessableEntityException("The provided access condition: " + name + " is not supported!"); + } + return; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java index 10a96a557f..6b66947c4b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/DescribeStep.java @@ -31,6 +31,8 @@ import org.dspace.content.InProgressSubmission; import org.dspace.content.MetadataValue; import org.dspace.core.Context; import org.dspace.core.Utils; +import org.dspace.services.ConfigurationService; +import 
org.dspace.services.factory.DSpaceServicesFactory; /** * Describe step for DSpace Spring Rest. Expose and allow patching of the in progress submission metadata. It is @@ -43,7 +45,11 @@ public class DescribeStep extends AbstractProcessingStep { private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DescribeStep.class); + // Input reader for form configuration private DCInputsReader inputReader; + // Configuration service + private final ConfigurationService configurationService = + DSpaceServicesFactory.getInstance().getConfigurationService(); public DescribeStep() throws DCInputsReaderException { inputReader = new DCInputsReader(); @@ -64,9 +70,19 @@ public class DescribeStep extends AbstractProcessingStep { private void readField(InProgressSubmission obj, SubmissionStepConfig config, DataDescribe data, DCInputSet inputConfig) throws DCInputsReaderException { + String documentTypeValue = ""; + List documentType = itemService.getMetadataByMetadataString(obj.getItem(), + configurationService.getProperty("submit.type-bind.field", "dc.type")); + if (documentType.size() > 0) { + documentTypeValue = documentType.get(0).getValue(); + } + + // Get list of all field names (including qualdrop names) allowed for this dc.type + List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + + // Loop input rows and process submitted metadata for (DCInput[] row : inputConfig.getFields()) { for (DCInput input : row) { - List fieldsName = new ArrayList(); if (input.isQualdropValue()) { for (Object qualifier : input.getPairs()) { @@ -91,20 +107,30 @@ public class DescribeStep extends AbstractProcessingStep { String[] metadataToCheck = Utils.tokenize(md.getMetadataField().toString()); if (data.getMetadata().containsKey( Utils.standardize(metadataToCheck[0], metadataToCheck[1], metadataToCheck[2], "."))) { - data.getMetadata() - .get(Utils.standardize(md.getMetadataField().getMetadataSchema().getName(), - 
md.getMetadataField().getElement(), - md.getMetadataField().getQualifier(), - ".")) - .add(dto); + // If field is allowed by type bind, add value to existing field set, otherwise remove + // all values for this field + if (allowedFieldNames.contains(fieldName)) { + data.getMetadata() + .get(Utils.standardize(md.getMetadataField().getMetadataSchema().getName(), + md.getMetadataField().getElement(), + md.getMetadataField().getQualifier(), + ".")) + .add(dto); + } else { + data.getMetadata().remove(Utils.standardize(metadataToCheck[0], metadataToCheck[1], + metadataToCheck[2], ".")); + } } else { - List listDto = new ArrayList<>(); - listDto.add(dto); - data.getMetadata() - .put(Utils.standardize(md.getMetadataField().getMetadataSchema().getName(), - md.getMetadataField().getElement(), - md.getMetadataField().getQualifier(), - "."), listDto); + // Add values only if allowed by type bind + if (allowedFieldNames.contains(fieldName)) { + List listDto = new ArrayList<>(); + listDto.add(dto); + data.getMetadata() + .put(Utils.standardize(md.getMetadataField().getMetadataSchema().getName(), + md.getMetadataField().getElement(), + md.getMetadataField().getQualifier(), + "."), listDto); + } } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/SherpaPolicyStep.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/SherpaPolicyStep.java new file mode 100644 index 0000000000..d37182904b --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/SherpaPolicyStep.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.submit.step; + +import java.util.Objects; +import javax.servlet.http.HttpServletRequest; + +import org.dspace.app.rest.model.patch.Operation; +import 
org.dspace.app.rest.model.step.SherpaPolicy; +import org.dspace.app.rest.submit.AbstractProcessingStep; +import org.dspace.app.rest.submit.SubmissionService; +import org.dspace.app.sherpa.cache.SherpaCacheEvictService; +import org.dspace.app.sherpa.submit.SHERPASubmitService; +import org.dspace.app.sherpa.v2.SHERPAResponse; +import org.dspace.app.util.SubmissionStepConfig; +import org.dspace.content.InProgressSubmission; +import org.dspace.core.Context; +import org.dspace.utils.DSpace; +import org.dspace.web.ContextUtil; + +/** + * SherpaPolicy step for DSpace Spring Rest. Expose information about + * the Sherpa policies for the in progress submission. + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class SherpaPolicyStep extends AbstractProcessingStep { + + public static final String SHERPA_RETRIEVAL_TIME = "retrievalTime"; + + private SherpaCacheEvictService sherpaCacheEvictService = new DSpace().getSingletonService( + SherpaCacheEvictService.class); + private SHERPASubmitService sherpaSubmitService = new DSpace().getSingletonService(SHERPASubmitService.class); + + @Override + @SuppressWarnings("unchecked") + public SherpaPolicy getData(SubmissionService submissionService, InProgressSubmission obj, + SubmissionStepConfig config) throws Exception { + Context context = ContextUtil.obtainCurrentRequestContext(); + SHERPAResponse response = sherpaSubmitService.searchRelatedJournals(context, obj.getItem()); + if (Objects.nonNull(response)) { + SherpaPolicy result = new SherpaPolicy(); + result.setSherpaResponse(response); + return result; + } + return null; + } + + @Override + public void doPatchProcessing(Context context, HttpServletRequest currentRequest, InProgressSubmission source, + Operation op, SubmissionStepConfig stepConf) throws Exception { + String path = op.getPath(); + if (path.contains(SHERPA_RETRIEVAL_TIME)) { + sherpaCacheEvictService.evictCacheValues(context, source.getItem()); + } + } + +} \ No newline at end of file 
diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java index e5ba916e0f..a0f9d9d845 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/step/validation/MetadataValidation.java @@ -16,6 +16,7 @@ import org.apache.logging.log4j.Logger; import org.dspace.app.rest.model.ErrorRest; import org.dspace.app.rest.repository.WorkspaceItemRestRepository; import org.dspace.app.rest.submit.SubmissionService; +import org.dspace.app.rest.utils.ContextUtil; import org.dspace.app.util.DCInput; import org.dspace.app.util.DCInputSet; import org.dspace.app.util.DCInputsReader; @@ -25,6 +26,7 @@ import org.dspace.content.InProgressSubmission; import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.MetadataAuthorityService; import org.dspace.content.service.ItemService; +import org.dspace.services.ConfigurationService; /** * Execute three validation check on fields validation: @@ -50,12 +52,25 @@ public class MetadataValidation extends AbstractValidation { private MetadataAuthorityService metadataAuthorityService; + private ConfigurationService configurationService; + @Override public List validate(SubmissionService submissionService, InProgressSubmission obj, SubmissionStepConfig config) throws DCInputsReaderException, SQLException { List errors = new ArrayList<>(); + String documentTypeValue = ""; DCInputSet inputConfig = getInputReader().getInputsByFormName(config.getId()); + List documentType = itemService.getMetadataByMetadataString(obj.getItem(), + configurationService.getProperty("submit.type-bind.field", "dc.type")); + if (documentType.size() > 0) { + documentTypeValue = documentType.get(0).getValue(); + } + + // Get list of all field names (including qualdrop 
names) allowed for this dc.type + List allowedFieldNames = inputConfig.populateAllowedFieldNames(documentTypeValue); + + // Begin the actual validation loop for (DCInput[] row : inputConfig.getFields()) { for (DCInput input : row) { String fieldKey = @@ -63,6 +78,7 @@ public class MetadataValidation extends AbstractValidation { boolean isAuthorityControlled = metadataAuthorityService.isAuthorityControlled(fieldKey); List fieldsName = new ArrayList(); + if (input.isQualdropValue()) { boolean foundResult = false; List inputPairs = input.getPairs(); @@ -71,31 +87,61 @@ public class MetadataValidation extends AbstractValidation { for (int i = 1; i < inputPairs.size(); i += 2) { String fullFieldname = input.getFieldName() + "." + (String) inputPairs.get(i); List mdv = itemService.getMetadataByMetadataString(obj.getItem(), fullFieldname); - validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors); - if (mdv.size() > 0 && input.isVisible(DCInput.SUBMISSION_SCOPE)) { - foundResult = true; + + // Check the lookup list. If no other inputs of the same field name allow this type, + // then remove. This includes field name without qualifier. + if (!input.isAllowedFor(documentTypeValue) && (!allowedFieldNames.contains(fullFieldname) + && !allowedFieldNames.contains(input.getFieldName()))) { + itemService.removeMetadataValues(ContextUtil.obtainCurrentRequestContext(), + obj.getItem(), mdv); + } else { + validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors); + if (mdv.size() > 0 && input.isVisible(DCInput.SUBMISSION_SCOPE)) { + foundResult = true; + } } } - if (input.isRequired() && ! 
foundResult) { + if (input.isRequired() && !foundResult) { // for this required qualdrop no value was found, add to the list of error fields addError(errors, ERROR_VALIDATION_REQUIRED, - "/" + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" + - input.getFieldName()); + "/" + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" + + input.getFieldName()); } - } else { fieldsName.add(input.getFieldName()); } for (String fieldName : fieldsName) { + boolean valuesRemoved = false; List mdv = itemService.getMetadataByMetadataString(obj.getItem(), fieldName); + if (!input.isAllowedFor(documentTypeValue)) { + // Check the lookup list. If no other inputs of the same field name allow this type, + // then remove. Otherwise, do not + if (!(allowedFieldNames.contains(fieldName))) { + itemService.removeMetadataValues(ContextUtil.obtainCurrentRequestContext(), + obj.getItem(), mdv); + valuesRemoved = true; + log.debug("Stripping metadata values for " + input.getFieldName() + " on type " + + documentTypeValue + " as it is allowed by another input of the same field " + + "name"); + } else { + log.debug("Not removing unallowed metadata values for " + input.getFieldName() + " on type " + + documentTypeValue + " as it is allowed by another input of the same field " + + "name"); + } + } validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors); - if ((input.isRequired() && mdv.size() == 0) && input.isVisible(DCInput.SUBMISSION_SCOPE)) { - // since this field is missing add to list of error - // fields - addError(errors, ERROR_VALIDATION_REQUIRED, - "/" + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" + - input.getFieldName()); + if ((input.isRequired() && mdv.size() == 0) && input.isVisible(DCInput.SUBMISSION_SCOPE) + && !valuesRemoved) { + // Is the input required for *this* type? 
In other words, are we looking at a required + // input that is also allowed for this document type + if (input.isAllowedFor(documentTypeValue)) { + // since this field is missing add to list of error + // fields + addError(errors, ERROR_VALIDATION_REQUIRED, "/" + + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" + + input.getFieldName()); + } } } } @@ -103,6 +149,7 @@ public class MetadataValidation extends AbstractValidation { return errors; } + private void validateMetadataValues(List mdv, DCInput input, SubmissionStepConfig config, boolean isAuthorityControlled, String fieldKey, List errors) { @@ -124,6 +171,10 @@ public class MetadataValidation extends AbstractValidation { } } + public void setConfigurationService(ConfigurationService configurationService) { + this.configurationService = configurationService; + } + public void setItemService(ItemService itemService) { this.itemService = itemService; } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/BitstreamResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/BitstreamResource.java index 694e6a254a..4e5545fabc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/BitstreamResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/BitstreamResource.java @@ -11,6 +11,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.Set; import java.util.UUID; import org.apache.commons.io.IOUtils; @@ -40,6 +41,7 @@ public class BitstreamResource extends AbstractResource { private UUID currentUserUUID; private boolean shouldGenerateCoverPage; private byte[] file; + private Set currentSpecialGroups; private BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService(); private EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService(); @@ -47,11 +49,12 
@@ public class BitstreamResource extends AbstractResource { new DSpace().getServiceManager() .getServicesByType(CitationDocumentService.class).get(0); - public BitstreamResource(String name, UUID uuid, UUID currentUserUUID, + public BitstreamResource(String name, UUID uuid, UUID currentUserUUID, Set currentSpecialGroups, boolean shouldGenerateCoverPage) { this.name = name; this.uuid = uuid; this.currentUserUUID = currentUserUUID; + this.currentSpecialGroups = currentSpecialGroups; this.shouldGenerateCoverPage = shouldGenerateCoverPage; } @@ -84,9 +87,8 @@ public class BitstreamResource extends AbstractResource { @Override public InputStream getInputStream() throws IOException { - try (Context context = new Context()) { - EPerson currentUser = ePersonService.find(context, currentUserUUID); - context.setCurrentUser(currentUser); + try (Context context = initializeContext()) { + Bitstream bitstream = bitstreamService.find(context, uuid); InputStream out; @@ -110,9 +112,7 @@ public class BitstreamResource extends AbstractResource { @Override public long contentLength() throws IOException { - try (Context context = new Context()) { - EPerson currentUser = ePersonService.find(context, currentUserUUID); - context.setCurrentUser(currentUser); + try (Context context = initializeContext()) { Bitstream bitstream = bitstreamService.find(context, uuid); if (shouldGenerateCoverPage) { return getCoverpageByteArray(context, bitstream).length; @@ -123,4 +123,12 @@ public class BitstreamResource extends AbstractResource { throw new IOException(e); } } + + private Context initializeContext() throws SQLException { + Context context = new Context(); + EPerson currentUser = ePersonService.find(context, currentUserUUID); + context.setCurrentUser(currentUser); + currentSpecialGroups.forEach(context::setSpecialGroup); + return context; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceVersionConfigurationEnricher.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceVersionConfigurationEnricher.java new file mode 100644 index 0000000000..a2e27bb5bc --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceVersionConfigurationEnricher.java @@ -0,0 +1,35 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.utils; + +import org.dspace.app.util.Util; +import org.dspace.services.ConfigurationService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.ApplicationArguments; +import org.springframework.boot.ApplicationRunner; +import org.springframework.stereotype.Component; + +/** + * Class that use the configuration service to add a property named + * 'dspace.version' with the current DSpace application version. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@Component +public class DSpaceVersionConfigurationEnricher implements ApplicationRunner { + + @Autowired + private ConfigurationService configurationService; + + @Override + public void run(ApplicationArguments args) throws Exception { + configurationService.addPropertyValue("dspace.version", Util.getSourceVersion()); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java index 8e1bb37005..80279619eb 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/Utils.java @@ -23,10 +23,12 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; +import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; 
import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; +import java.net.URLDecoder; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; @@ -470,7 +472,7 @@ public class Utils { String line = scanner.nextLine(); if (org.springframework.util.StringUtils.hasText(line)) { - list.add(line); + list.add(decodeUrl(line)); } } @@ -480,6 +482,14 @@ public class Utils { return list; } + private String decodeUrl(String url) { + try { + return URLDecoder.decode(url, "UTF-8"); + } catch (UnsupportedEncodingException e) { + log.warn("The following url could not be decoded: " + url); + } + return StringUtils.EMPTY; + } /** * This method will retrieve a list of DSpaceObjects from the Request by reading in the Request's InputStream diff --git a/dspace-server-webapp/src/main/resources/application.properties b/dspace-server-webapp/src/main/resources/application.properties index 5992ded040..f6fba076c0 100644 --- a/dspace-server-webapp/src/main/resources/application.properties +++ b/dspace-server-webapp/src/main/resources/application.properties @@ -70,6 +70,9 @@ server.servlet.encoding.force=true # However, you may wish to set this to "always" in your 'local.cfg' for development or debugging purposes. server.error.include-stacktrace = never +# When to include the error message in error responses (introduced in Spring 2.3.x) +server.error.include-message = always + # Spring Boot proxy configuration (can be overridden in local.cfg). # By default, Spring Boot does not automatically use X-Forwarded-* Headers when generating links (and similar) in the # DSpace REST API. 
Three options are currently supported by Spring Boot: diff --git a/dspace-server-webapp/src/main/resources/spring/spring-dspace-addon-validation-services.xml b/dspace-server-webapp/src/main/resources/spring/spring-dspace-addon-validation-services.xml index 0dc968674a..f39d553c96 100644 --- a/dspace-server-webapp/src/main/resources/spring/spring-dspace-addon-validation-services.xml +++ b/dspace-server-webapp/src/main/resources/spring/spring-dspace-addon-validation-services.xml @@ -16,6 +16,7 @@ + diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/oai/OAIpmhIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/oai/OAIpmhIT.java index 2c50257ec9..22ece756fe 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/oai/OAIpmhIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/oai/OAIpmhIT.java @@ -63,9 +63,9 @@ public class OAIpmhIT extends AbstractControllerIntegrationTest { private ConfigurationService configurationService; // All OAI-PMH paths that we test against - private final String ROOT_PATH = "/oai"; + private final String ROOT_PATH = "/oai/"; private final String DEFAULT_CONTEXT_PATH = "request"; - private final String DEFAULT_CONTEXT = ROOT_PATH + "/" + DEFAULT_CONTEXT_PATH; + private final String DEFAULT_CONTEXT = ROOT_PATH + DEFAULT_CONTEXT_PATH; // Mock to ensure XOAI caching is disabled for all tests (see @Before method) @MockBean diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ADSImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ADSImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..4878cdecab --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ADSImportMetadataSourceServiceIT.java @@ -0,0 +1,274 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ 
+ */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import javax.el.MethodNotFoundException; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.ads.ADSImportMetadataSourceServiceImpl; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link ADSImportMetadataSourceServiceImpl} + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class ADSImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClient; + + @Autowired + private ADSImportMetadataSourceServiceImpl adsServiceImpl; + + @Test + public void adsImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ads-ex.json")) { + + String adsJsonResp = IOUtils.toString(file, 
Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(adsJsonResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = adsServiceImpl.getRecords("test query", 0, 2); + assertEquals(2, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void adsImportMetadataGetRecordsCountTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ads-ex.json")) { + String adsResp = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(adsResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + int tot = adsServiceImpl.getRecordsCount("test query"); + assertEquals(9383, tot); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void adsImportMetadataGetRecordsCountByQueryTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ads-ex.json")) { + String adsResp = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(adsResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + 
context.restoreAuthSystemState(); + Query q = new Query(); + q.addParameter("query", "test"); + int tot = adsServiceImpl.getRecordsCount(q); + assertEquals(9383, tot); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test(expected = MethodNotFoundException.class) + public void adsImportMetadataFindMatchingRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + org.dspace.content.Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, col1) + .withTitle("test item") + .withIssueDate("2021") + .build(); + context.restoreAuthSystemState(); + adsServiceImpl.findMatchingRecords(testItem); + } + + @Test + public void adsImportMetadataGetRecordByIdTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream file = getClass().getResourceAsStream("ads-single-obj.json")) { + + String adsJsonResp = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClient.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(adsJsonResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + collection2match.remove(1); + ImportRecord recordImported = adsServiceImpl.getRecord("2017PhRvL.119p1101A"); + assertNotNull(recordImported); + matchRecords(new ArrayList(Arrays.asList(recordImported)), collection2match); + } finally { + liveImportClient.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + //define first record + List metadatums 
= new ArrayList(); + MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "Abbott, B. P."); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Babak, S."); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Di Fiore, L."); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Virgo Collaboration"); + MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.1103/PhysRevLett.116.061102"); + MetadatumDTO type = createMetadatumDTO("dc", "type", null, "article"); + MetadatumDTO adsbibcode = createMetadatumDTO("dc", "identifier", "other", "2016PhRvL.116f1102A"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO subject = createMetadatumDTO("dc", "subject", null, "General Relativity and Quantum Cosmology"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, + "Astrophysics - High Energy Astrophysical Phenomena"); + MetadatumDTO source = createMetadatumDTO("dc", "source", null, "Physical Review Letters"); + MetadatumDTO title = createMetadatumDTO("dc", "title", null, + "Observation of Gravitational Waves from a Binary Black Hole Merger"); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", + "On September 14, 2015 at 09:50:45 UTC the two detectors of the Laser" + + " Interferometer Gravitational-Wave Observatory simultaneously observed" + + " a transient gravitational-wave signal. The signal sweeps upwards in frequency" + + " from 35 to 250 Hz with a peak gravitational-wave strain of 1.0 ×10-21." + + " It matches the waveform predicted by general relativity for the inspiral and merger" + + " of a pair of black holes and the ringdown of the resulting single black hole." + + " The signal was observed with a matched-filter signal-to-noise ratio of 24 and a false" + + " alarm rate estimated to be less than 1 event per 203 000 years, equivalent to a significance" + + " greater than 5.1 σ . 
The source lies at a luminosity distance of 41 0-180+160" + + " Mpc corresponding to a redshift z =0.0 9-0.04+0.03 ." + + " In the source frame, the initial black hole masses are 3 6-4+5M" + + " and 2 9-4+4M , and the final black hole mass is" + + " 6 2-4+4M , with 3. 0-0.5+0.5M" + + " c2 radiated in gravitational waves. All uncertainties define 90% credible intervals." + + " These observations demonstrate the existence of binary stellar-mass black hole systems." + + " This is the first direct detection of gravitational waves and the first observation of a binary" + + " black hole merger."); + + metadatums.add(description); + metadatums.add(author); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(doi); + metadatums.add(type); + metadatums.add(adsbibcode); + metadatums.add(date); + metadatums.add(subject); + metadatums.add(subject2); + metadatums.add(source); + metadatums.add(title); + + ImportRecord firstrRecord = new ImportRecord(metadatums); + + //define second record + List metadatums2 = new ArrayList(); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Abbott, B. 
P."); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Babak, S."); + MetadatumDTO author7 = createMetadatumDTO("dc", "contributor", "author", "Vorvick, C."); + MetadatumDTO author8 = createMetadatumDTO("dc", "contributor", "author", "Wade, M."); + MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.1103/PhysRevLett.119.161101"); + MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "article"); + MetadatumDTO adsbibcode2 = createMetadatumDTO("dc", "identifier", "other", "2017PhRvL.119p1101A"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2017"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "General Relativity and Quantum Cosmology"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, + "Astrophysics - High Energy Astrophysical Phenomena"); + MetadatumDTO source2 = createMetadatumDTO("dc", "source", null, "Physical Review Letters"); + MetadatumDTO title2 = createMetadatumDTO("dc", "title", null, + "GW170817: Observation of Gravitational Waves from a Binary Neutron Star Inspiral"); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", + "On August 17, 2017 at 12∶41:04 UTC the Advanced LIGO and Advanced Virgo" + + " gravitational-wave detectors made their first observation of a binary neutron star inspiral." + + " The signal, GW170817, was detected with a combined signal-to-noise ratio of 32.4 and a" + + " false-alarm-rate estimate of less than one per 8.0 ×104 years ." + + " We infer the component masses of the binary to be between 0.86 and 2.26 M ," + + " in agreement with masses of known neutron stars. Restricting the component spins to the" + + " range inferred in binary neutron stars, we find the component masses to be in the" + + " range 1.17 - 1.60 M , with the total mass of the system 2.7" + + " 4-0.01+0.04M . 
The source was localized within a sky region" + + " of 28 deg2 (90% probability) and had a luminosity distance of 4" + + " 0-14+8 Mpc , the closest and most precisely localized" + + " gravitational-wave signal yet. The association with the γ -ray burst GRB 170817A," + + " detected by Fermi-GBM 1.7 s after the coalescence, corroborates the hypothesis of a neutron" + + " star merger and provides the first direct evidence of a link between these mergers and" + + " short γ -ray bursts. Subsequent identification of transient counterparts across the" + + " electromagnetic spectrum in the same location further supports the interpretation of" + + " this event as a neutron star merger. This unprecedented joint gravitational and" + + " electromagnetic observation provides insight into astrophysics, dense matter," + + " gravitation, and cosmology."); + + metadatums2.add(description2); + metadatums2.add(author5); + metadatums2.add(author6); + metadatums2.add(author7); + metadatums2.add(author8); + metadatums2.add(doi2); + metadatums2.add(type2); + metadatums2.add(adsbibcode2); + metadatums2.add(date2); + metadatums2.add(subject3); + metadatums2.add(subject4); + metadatums2.add(source2); + metadatums2.add(title2); + + ImportRecord secondRecord = new ImportRecord(metadatums2); + records.add(firstrRecord); + records.add(secondRecord); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AbstractLiveImportIntegrationTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AbstractLiveImportIntegrationTest.java new file mode 100644 index 0000000000..b2623a63f9 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AbstractLiveImportIntegrationTest.java @@ -0,0 +1,100 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * 
http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.UnsupportedEncodingException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.lang3.StringUtils; +import org.apache.http.ProtocolVersion; +import org.apache.http.StatusLine; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.entity.BasicHttpEntity; +import org.apache.tools.ant.filters.StringInputStream; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; + +/** + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class AbstractLiveImportIntegrationTest extends AbstractControllerIntegrationTest { + + protected void matchRecords(ArrayList recordsImported, ArrayList records2match) { + assertEquals(records2match.size(), recordsImported.size()); + for (int i = 0; i < recordsImported.size(); i++) { + ImportRecord firstImported = recordsImported.get(i); + ImportRecord first2match = records2match.get(i); + checkMetadataValue(firstImported.getValueList(), first2match.getValueList()); + } + } + + private void checkMetadataValue(List list, List list2) { + assertEquals(list.size(), list2.size()); + for (int i = 0; i < list.size(); i++) { + assertTrue(sameMetadatum(list.get(i), list2.get(i))); + } + } + + private boolean sameMetadatum(MetadatumDTO metadatum, MetadatumDTO metadatum2) { + if (StringUtils.equals(metadatum.getSchema(), metadatum2.getSchema()) && + StringUtils.equals(metadatum.getElement(), metadatum2.getElement()) && + StringUtils.equals(metadatum.getQualifier(), metadatum2.getQualifier()) && + StringUtils.equals(metadatum.getValue(), metadatum2.getValue())) { + return true; + } 
+ return false; + } + + protected MetadatumDTO createMetadatumDTO(String schema, String element, String qualifier, String value) { + MetadatumDTO metadatumDTO = new MetadatumDTO(); + metadatumDTO.setSchema(schema); + metadatumDTO.setElement(element); + metadatumDTO.setQualifier(qualifier); + metadatumDTO.setValue(value); + return metadatumDTO; + } + + protected CloseableHttpResponse mockResponse(String xmlExample, int statusCode, String reason) + throws UnsupportedEncodingException { + BasicHttpEntity basicHttpEntity = new BasicHttpEntity(); + basicHttpEntity.setChunked(true); + basicHttpEntity.setContent(new StringInputStream(xmlExample)); + + CloseableHttpResponse response = mock(CloseableHttpResponse.class); + when(response.getStatusLine()).thenReturn(statusLine(statusCode, reason)); + when(response.getEntity()).thenReturn(basicHttpEntity); + return response; + } + + protected StatusLine statusLine(int statusCode, String reason) { + return new StatusLine() { + @Override + public ProtocolVersion getProtocolVersion() { + return new ProtocolVersion("http", 1, 1); + } + + @Override + public int getStatusCode() { + return statusCode; + } + + @Override + public String getReasonPhrase() { + return reason; + } + }; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java index 2dc6055909..ae6be934ce 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/AuthenticationRestControllerIT.java @@ -48,6 +48,7 @@ import org.dspace.app.rest.converter.EPersonConverter; import org.dspace.app.rest.matcher.AuthenticationStatusMatcher; import org.dspace.app.rest.matcher.AuthorizationMatcher; import org.dspace.app.rest.matcher.EPersonMatcher; +import org.dspace.app.rest.matcher.GroupMatcher; import 
org.dspace.app.rest.matcher.HalMatcher; import org.dspace.app.rest.model.EPersonRest; import org.dspace.app.rest.projection.DefaultProjection; @@ -106,6 +107,10 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio "org.dspace.authenticate.IPAuthentication", "org.dspace.authenticate.ShibAuthentication" }; + public static final String[] PASS_AND_IP = { + "org.dspace.authenticate.PasswordAuthentication", + "org.dspace.authenticate.IPAuthentication" + }; // see proxies.trusted.ipranges in local.cfg public static final String TRUSTED_IP = "7.7.7.7"; @@ -160,6 +165,101 @@ public class AuthenticationRestControllerIT extends AbstractControllerIntegratio .andExpect(status().isNoContent()); } + /** + * This test verifies: + * - that a logged in via password user finds the expected specialGroupPwd in _embedded.specialGroups; + * - that a logged in via password and specific IP user finds the expected specialGroupPwd and specialGroupIP + * in _embedded.specialGroups; + * - that a not logged in user with a specific IP finds the expected specialGroupIP in _embedded.specialGroups; + * @throws Exception + */ + @Test + public void testStatusGetSpecialGroups() throws Exception { + context.turnOffAuthorisationSystem(); + + Group specialGroupPwd = GroupBuilder.createGroup(context) + .withName("specialGroupPwd") + .build(); + Group specialGroupIP = GroupBuilder.createGroup(context) + .withName("specialGroupIP") + .build(); + + configurationService.setProperty("plugin.sequence.org.dspace.authenticate.AuthenticationMethod", PASS_AND_IP); + configurationService.setProperty("authentication-password.login.specialgroup","specialGroupPwd"); + configurationService.setProperty("authentication-ip.specialGroupIP", "123.123.123.123"); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/authn/status").param("projection", "full")) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd")))); + + // try the special groups link endpoint in the same scenario than above + getClient(token).perform(get("/api/authn/status/specialGroups").param("projection", "full")) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd")))); + + getClient(token).perform(get("/api/authn/status").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(true))) + .andExpect(jsonPath("$.authenticationMethod", is("password"))) + .andExpect(jsonPath("$.type", is("status"))) + .andExpect(jsonPath("$._links.specialGroups.href", startsWith(REST_SERVER_URL))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd"), + GroupMatcher.matchGroupWithName("specialGroupIP")))); + + // try the special groups link endpoint in the same scenario than above + 
getClient(token).perform(get("/api/authn/status/specialGroups").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupPwd"), + GroupMatcher.matchGroupWithName("specialGroupIP")))); + + getClient().perform(get("/api/authn/status").param("projection", "full").with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", AuthenticationStatusMatcher.matchFullEmbeds())) + // fails due to bug https://github.com/DSpace/DSpace/issues/8274 + //.andExpect(jsonPath("$", AuthenticationStatusMatcher.matchLinks())) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$.okay", is(true))) + .andExpect(jsonPath("$.authenticated", is(false))) + .andExpect(jsonPath("$._embedded.specialGroups._embedded.specialGroups", + Matchers.containsInAnyOrder(GroupMatcher.matchGroupWithName("specialGroupIP")))); + + // try the special groups link endpoint in the same scenario than above + getClient().perform(get("/api/authn/status/specialGroups").param("projection", "full") + .with(ip("123.123.123.123"))) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.specialGroups", + Matchers.containsInAnyOrder( + GroupMatcher.matchGroupWithName("specialGroupIP")))); + } + @Test @Ignore // Ignored until an endpoint is added to return all groups. Anonymous is not considered a direct group. 
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 78373a85f5..09dbdca505 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -684,6 +684,56 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest checkNumberOfStatsRecords(bitstream, 1); } + @Test + public void restrictedSpecialGroupBitstreamTest() throws Exception { + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + Group restrictedGroup = GroupBuilder.createGroup(context) + .withName("Restricted Group") + .build(); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + + Item item = ItemBuilder.createItem(context, col1) + .withTitle("item 1") + .withIssueDate("2013-01-17") + .withAuthor("Doe, John") + .build(); + + bitstream = BitstreamBuilder + .createBitstream(context, item, is) + .withName("Test Embargoed Bitstream") + .withDescription("This bitstream is embargoed") + .withMimeType("text/plain") + .withReaderGroup(restrictedGroup) + .build(); + } + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isForbidden()); + + configurationService.setProperty("authentication-password.login.specialgroup", "Restricted Group"); + + authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/core/bitstreams/" + 
bitstream.getID() + "/content")) + .andExpect(status().isOk()); + + checkNumberOfStatsRecords(bitstream, 1); + + } + @Test public void restrictedGroupBitstreamAccessGrantByAdminsTest() throws Exception { context.turnOffAuthorisationSystem(); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionGroupRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionGroupRestControllerIT.java index 767ea5f565..f6ab10c087 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionGroupRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CollectionGroupRestControllerIT.java @@ -28,6 +28,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.service.AuthorizeService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.WorkspaceItemBuilder; import org.dspace.content.Collection; import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; @@ -2414,4 +2415,27 @@ public class CollectionGroupRestControllerIT extends AbstractControllerIntegrati .andExpect(status().isNotFound()); } + @Test + public void deleteCollectionWorkflowGroupWithPooledTaskTest() throws Exception { + context.turnOffAuthorisationSystem(); + Group reviewer = workflowService.createWorkflowRoleGroup(context, collection, "reviewer"); + + // Submit an Item into the workflow -> moves to the "reviewer" step's pool. 
+ // The role must have at least one EPerson, otherwise the WSI gets archived immediately + groupService.addMember(context, reviewer, eperson); + workflowService.start( + context, + WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Dummy Item") + .build() + ); + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + getClient(token).perform(delete("/api/core/collections/" + collection.getID() + "/workflowGroups/reviewer")) + .andExpect(status().isUnprocessableEntity()); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..9a0d39225c --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -0,0 +1,199 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import javax.el.MethodNotFoundException; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.crossref.CrossRefImportMetadataSourceServiceImpl; +import 
org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link CrossRefImportMetadataSourceServiceImpl} + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Autowired + private CrossRefImportMetadataSourceServiceImpl crossRefServiceImpl; + + @Test + public void crossRefImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream crossRefResp = getClass().getResourceAsStream("crossRef-test.json")) { + + String crossRefRespXmlResp = IOUtils.toString(crossRefResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(crossRefRespXmlResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = crossRefServiceImpl.getRecords("test query", 0, 2); + assertEquals(2, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void crossRefImportMetadataGetRecordsCountTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + 
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream crossRefResp = getClass().getResourceAsStream("crossRef-test.json")) { + String crossRefRespXmlResp = IOUtils.toString(crossRefResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(crossRefRespXmlResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + int tot = crossRefServiceImpl.getRecordsCount("test query"); + assertEquals(10, tot); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void crossRefImportMetadataGetRecordByIdTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream crossRefResp = getClass().getResourceAsStream("crossRef-by-id.json")) { + + String crossRefRespXmlResp = IOUtils.toString(crossRefResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(crossRefRespXmlResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + collection2match.remove(1); + ImportRecord recordImported = crossRefServiceImpl.getRecord("10.26693/jmbs01.02.184"); + assertNotNull(recordImported); + Collection recordsImported = Arrays.asList(recordImported); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test(expected = MethodNotFoundException.class) + public void crossRefImportMetadataFindMatchingRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = 
CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + org.dspace.content.Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, col1) + .withTitle("test item") + .withIssueDate("2021") + .build(); + + context.restoreAuthSystemState(); + crossRefServiceImpl.findMatchingRecords(testItem); + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + //define first record + List metadatums = new ArrayList(); + MetadatumDTO title = createMetadatumDTO("dc", "title", null, + "State of Awareness of Freshers’ Groups Chortkiv State" + + " Medical College of Prevention of Iodine Deficiency Diseases"); + MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. Senyuk"); + MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", + "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); + MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); + MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); + MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); + MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + + metadatums.add(title); + metadatums.add(author); + metadatums.add(date); + metadatums.add(type); + metadatums.add(ispartof); + metadatums.add(doi); + metadatums.add(issn); + metadatums.add(volume); + metadatums.add(issue); + + ImportRecord firstrRecord = new ImportRecord(metadatums); + + //define second record + List metadatums2 = new ArrayList(); + MetadatumDTO title2 = createMetadatumDTO("dc", "title", null, + "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); + MetadatumDTO author2 = 
createMetadatumDTO("dc", "contributor", "author", "K. І. Kozak"); + MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", + "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); + MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); + MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); + MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); + MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + + metadatums2.add(title2); + metadatums2.add(author2); + metadatums2.add(date2); + metadatums2.add(type2); + metadatums2.add(ispartof2); + metadatums2.add(doi2); + metadatums2.add(issn2); + metadatums2.add(volume2); + metadatums2.add(issue2); + + ImportRecord secondRecord = new ImportRecord(metadatums2); + records.add(firstrRecord); + records.add(secondRecord); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonAuthorityIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonAuthorityIT.java new file mode 100644 index 0000000000..3b785bbfa0 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EPersonAuthorityIT.java @@ -0,0 +1,152 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.matcher.VocabularyMatcher.matchVocabularyEntry; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.sql.SQLException; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.EPersonBuilder; +import org.hamcrest.Matchers; +import org.junit.Test; + +/** + * Integration tests for {@link EPersonAuthority}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class EPersonAuthorityIT extends AbstractControllerIntegrationTest { + + + @Test + public void testEPersonAuthorityWithFirstName() throws Exception { + + context.turnOffAuthorisationSystem(); + String firstEPersonId = createEPerson("Luca", "Giamminonni"); + String secondEPersonId = createEPerson("Andrea", "Bollini"); + String thirdEPersonId = createEPerson("Luca", "Bollini"); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Luca")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId), + matchVocabularyEntry("Luca Bollini", "Luca Bollini", "vocabularyEntry", thirdEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))); + + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Andrea")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + } + + @Test + public void testEPersonAuthorityWithLastName() throws Exception { + + context.turnOffAuthorisationSystem(); + String firstEPersonId = 
createEPerson("Luca", "Giamminonni"); + String secondEPersonId = createEPerson("Andrea", "Bollini"); + String thirdEPersonId = createEPerson("Luca", "Bollini"); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Giamminonni")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Bollini")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId), + matchVocabularyEntry("Luca Bollini", "Luca Bollini", "vocabularyEntry", thirdEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(2))); + + } + + @Test + public void testEPersonAuthorityWithId() throws Exception { + + context.turnOffAuthorisationSystem(); + String firstEPersonId = createEPerson("Luca", "Giamminonni"); + String secondEPersonId = createEPerson("Andrea", "Bollini"); + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", firstEPersonId)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", secondEPersonId)) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder( + matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId)))) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(1))); + + } + + @Test + public void testEPersonAuthorityWithAnonymousUser() throws Exception { + + context.turnOffAuthorisationSystem(); + createEPerson("Luca", "Giamminonni"); + createEPerson("Andrea", "Bollini"); + context.restoreAuthSystemState(); + + getClient().perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Luca")) + .andExpect(status().isUnauthorized()); + + } + + @Test + public void testEPersonAuthorityWithNotAdminUser() throws Exception { + + context.turnOffAuthorisationSystem(); + createEPerson("Luca", "Giamminonni"); + createEPerson("Andrea", "Bollini"); + createEPerson("Luca", "Bollini"); + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries") + .param("filter", "Luca")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.entries", empty())) + .andExpect(jsonPath("$.page.totalElements", Matchers.is(0))); + + } + + private String createEPerson(String firstName, String lastName) throws SQLException { + return EPersonBuilder.createEPerson(context) + .withNameInMetadata(firstName, lastName) + .build() + .getID() + .toString(); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..60408e6d8b --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/EpoImportMetadataSourceServiceIT.java @@ -0,0 +1,221 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * 
tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.epo.service.EpoImportMetadataSourceServiceImpl; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link EpoImportMetadataSourceServiceImpl} + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class EpoImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClient; + + @Autowired + private EpoImportMetadataSourceServiceImpl epoServiceImpl; + + @Test + public void epoImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + InputStream file2token = null; + InputStream file = null; + InputStream file2 = null; + InputStream file3 = null; + String originKey = epoServiceImpl.getConsumerKey(); + String originSecret = epoServiceImpl.getConsumerSecret(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try { + file2token = getClass().getResourceAsStream("epo-token.json"); + file = getClass().getResourceAsStream("epo-resp.xml"); + file2 
= getClass().getResourceAsStream("epo-first.xml"); + file3 = getClass().getResourceAsStream("epo-second.xml"); + + String tokenResp = IOUtils.toString(file2token, Charset.defaultCharset()); + String epoResp = IOUtils.toString(file, Charset.defaultCharset()); + String epoResp2 = IOUtils.toString(file2, Charset.defaultCharset()); + String epoResp3 = IOUtils.toString(file3, Charset.defaultCharset()); + + epoServiceImpl.setConsumerKey("test-key"); + epoServiceImpl.setConsumerSecret("test-secret"); + liveImportClient.setHttpClient(httpClient); + + CloseableHttpResponse responseWithToken = mockResponse(tokenResp, 200, "OK"); + CloseableHttpResponse response1 = mockResponse(epoResp, 200, "OK"); + CloseableHttpResponse response2 = mockResponse(epoResp2, 200, "OK"); + CloseableHttpResponse response3 = mockResponse(epoResp3, 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())) + .thenReturn(responseWithToken, response1, response2, response3); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = epoServiceImpl.getRecords("test query", 0, 2); + assertEquals(2, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + if (Objects.nonNull(file2token)) { + file2token.close(); + } + if (Objects.nonNull(file)) { + file.close(); + } + if (Objects.nonNull(file2)) { + file2.close(); + } + if (Objects.nonNull(file3)) { + file3.close(); + } + epoServiceImpl.setConsumerKey(originKey); + epoServiceImpl.setConsumerSecret(originSecret); + liveImportClient.setHttpClient(originalHttpClient); + } + } + + @Test + public void epoImportMetadataGetRecordsCountTest() throws Exception { + context.turnOffAuthorisationSystem(); + InputStream file = null; + InputStream file2 = null; + String originKey = epoServiceImpl.getConsumerKey(); + String originSecret = epoServiceImpl.getConsumerSecret(); + CloseableHttpClient originalHttpClient = liveImportClient.getHttpClient(); + 
CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try { + file = getClass().getResourceAsStream("epo-token.json"); + file2 = getClass().getResourceAsStream("epo-resp.xml"); + String token = IOUtils.toString(file, Charset.defaultCharset()); + String epoResp = IOUtils.toString(file2, Charset.defaultCharset()); + + epoServiceImpl.setConsumerKey("test-key"); + epoServiceImpl.setConsumerSecret("test-secret"); + liveImportClient.setHttpClient(httpClient); + + CloseableHttpResponse responseWithToken = mockResponse(token, 200, "OK"); + CloseableHttpResponse response1 = mockResponse(epoResp, 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(responseWithToken, response1); + + context.restoreAuthSystemState(); + int tot = epoServiceImpl.getRecordsCount("test query"); + assertEquals(10000, tot); + } finally { + if (Objects.nonNull(file)) { + file.close(); + } + if (Objects.nonNull(file2)) { + file2.close(); + } + epoServiceImpl.setConsumerKey(originKey); + epoServiceImpl.setConsumerSecret(originSecret); + liveImportClient.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + //define first record + List metadatums = new ArrayList(); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "epodoc:ES2902749T"); + MetadatumDTO identifier = createMetadatumDTO("dc", "identifier", null, "18705153"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2022-01-29"); + MetadatumDTO dateSubmitted = createMetadatumDTO("dc", "date", "submitted", "2018-01-19"); + MetadatumDTO applicant = createMetadatumDTO("dc", "contributor", null, "PANKA BLOOD TEST GMBH"); + MetadatumDTO applicant2 = createMetadatumDTO("dc", "contributor", null, "Panka Blood Test GmbH"); + MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "PANTEL KLAUS"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", " 
BARTKOWIAK KAI"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "PANTEL, Klaus, "); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "BARTKOWIAK, Kai"); + MetadatumDTO title = createMetadatumDTO("dc", "title", null, "Método para el diagnóstico del cáncer de mama"); + MetadatumDTO subject = createMetadatumDTO("dc", "subject", null, + "G01N 33/ 574 A I "); + + metadatums.add(identifierOther); + metadatums.add(identifier); + metadatums.add(date); + metadatums.add(dateSubmitted); + metadatums.add(applicant); + metadatums.add(applicant2); + metadatums.add(author); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(title); + metadatums.add(subject); + + ImportRecord firstrRecord = new ImportRecord(metadatums); + + //define second record + List metadatums2 = new ArrayList(); + MetadatumDTO identifierOther2 = createMetadatumDTO("dc", "identifier", "other", "epodoc:TW202202864"); + MetadatumDTO identifier2 = createMetadatumDTO("dc", "identifier", null, "109122801"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2022-01-16"); + MetadatumDTO dateSubmitted2 = createMetadatumDTO("dc", "date", "submitted", "2020-01-06"); + MetadatumDTO applicant3 = createMetadatumDTO("dc", "contributor", null, "ADVANTEST CORP [JP]"); + MetadatumDTO applicant4 = createMetadatumDTO("dc", "contributor", null, "ADVANTEST CORPORATION"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "POEPPE OLAF [DE]"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", " HILLIGES KLAUS-DIETER [DE]"); + MetadatumDTO author7 = createMetadatumDTO("dc", "contributor", "author", " KRECH ALAN [US]"); + MetadatumDTO author8 = createMetadatumDTO("dc", "contributor", "author", "POEPPE, OLAF, "); + MetadatumDTO author9 = createMetadatumDTO("dc", "contributor", "author", "HILLIGES, KLAUS-DIETER, "); + MetadatumDTO author10 = createMetadatumDTO("dc", 
"contributor", "author", "KRECH, ALAN"); + MetadatumDTO title2 = createMetadatumDTO("dc", "title", null, + "Automated test equipment for testing one or more devices under test, method for automated" + + " testing of one or more devices under test, and computer program using a buffer memory"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, + "G01R 31/ 319 A I "); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, + "G01R 31/ 3193 A I "); + metadatums2.add(identifierOther2); + metadatums2.add(identifier2); + metadatums2.add(date2); + metadatums2.add(dateSubmitted2); + metadatums2.add(applicant3); + metadatums2.add(applicant4); + metadatums2.add(author5); + metadatums2.add(author6); + metadatums2.add(author7); + metadatums2.add(author8); + metadatums2.add(author9); + metadatums2.add(author10); + metadatums2.add(title2); + metadatums2.add(subject2); + metadatums2.add(subject3); + + ImportRecord secondRecord = new ImportRecord(metadatums2); + records.add(firstrRecord); + records.add(secondRecord); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java new file mode 100644 index 0000000000..8c1c534de1 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/HealthIndicatorsIT.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.dspace.app.rest.configuration.ActuatorConfiguration.UP_WITH_ISSUES_STATUS; +import static org.dspace.app.rest.link.search.HealthIndicatorMatcher.match; +import static org.dspace.app.rest.link.search.HealthIndicatorMatcher.matchDatabase; +import static org.hamcrest.Matchers.allOf; 
+import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.util.Map; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.junit.Test; +import org.springframework.boot.actuate.health.Status; + +/** + * Integration tests to verify the health indicators configuration. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class HealthIndicatorsIT extends AbstractControllerIntegrationTest { + + private static final String HEALTH_PATH = "/actuator/health"; + + @Test + public void testWithAnonymousUser() throws Exception { + + getClient().perform(get(HEALTH_PATH)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.status", is(UP_WITH_ISSUES_STATUS.getCode()))) + .andExpect(jsonPath("$.components").doesNotExist()); + + } + + @Test + public void testWithNotAdminUser() throws Exception { + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get(HEALTH_PATH)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.status", is(UP_WITH_ISSUES_STATUS.getCode()))) + .andExpect(jsonPath("$.components").doesNotExist()); + + } + + @Test + public void testWithAdminUser() throws Exception { + + String token = getAuthToken(admin.getEmail(), password); + + getClient(token).perform(get(HEALTH_PATH)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.status", is(UP_WITH_ISSUES_STATUS.getCode()))) + .andExpect(jsonPath("$.components", allOf( + matchDatabase(Status.UP), + match("solrSearchCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), + match("solrStatisticsCore", Status.UP, Map.of("status", 0, "detectedPathType", "root")), + match("geoIp", UP_WITH_ISSUES_STATUS, + Map.of("reason", "The required 'dbfile' 
configuration is missing in solr-statistics.cfg!")) + ))); + + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/InfoEndpointIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/InfoEndpointIT.java new file mode 100644 index 0000000000..62de6e1d00 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/InfoEndpointIT.java @@ -0,0 +1,81 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.services.ConfigurationService; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for info actuator. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class InfoEndpointIT extends AbstractControllerIntegrationTest { + + private static final String INFO_PATH = "/actuator/info"; + + @Autowired + private ConfigurationService configurationService; + + @Test + public void testWithAnonymousUser() throws Exception { + + getClient().perform(get(INFO_PATH)) + .andExpect(status().isUnauthorized()); + + } + + @Test + public void testWithNotAdminUser() throws Exception { + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get(INFO_PATH)) + .andExpect(status().isForbidden()); + } + + @Test + public void testWithAdminUser() throws Exception { + + String token = getAuthToken(admin.getEmail(), password); + + getClient(token).perform(get(INFO_PATH)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.app.name", matchProperty("dspace.name"))) + .andExpect(jsonPath("$.app.dir", matchProperty("dspace.dir"))) + .andExpect(jsonPath("$.app.url", matchProperty("dspace.server.url"))) + .andExpect(jsonPath("$.app.db", matchProperty("db.url"))) + .andExpect(jsonPath("$.app.solr.server", matchProperty("solr.server"))) + .andExpect(jsonPath("$.app.solr.prefix", matchProperty("solr.multicorePrefix"))) + .andExpect(jsonPath("$.app.mail.server", matchProperty("mail.server"))) + .andExpect(jsonPath("$.app.mail.from-address", matchProperty("mail.from.address"))) + .andExpect(jsonPath("$.app.mail.feedback-recipient", matchProperty("feedback.recipient"))) + .andExpect(jsonPath("$.app.mail.mail-admin", matchProperty("mail.admin"))) + .andExpect(jsonPath("$.app.mail.mail-helpdesk", matchProperty("mail.helpdesk"))) + .andExpect(jsonPath("$.app.mail.alert-recipient", matchProperty("alert.recipient"))) + .andExpect(jsonPath("$.app.cors.allowed-origins", matchProperty("rest.cors.allowed-origins"))) + .andExpect(jsonPath("$.app.ui.url", matchProperty("dspace.ui.url"))) + .andExpect(jsonPath("$.java").exists()); + + } + + private 
Matcher matchProperty(String name) { + return is(configurationService.getProperty(name)); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java index 95ec537727..c1327355b9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemRestRepositoryIT.java @@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; import static org.dspace.core.Constants.WRITE; +import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -3861,6 +3862,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(jsonPath("$.inArchive", Matchers.is(false))) .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/items/" + item.getID().toString()))) + .andExpect(jsonPath("$._links.accessStatus.href", + Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus"))) .andExpect(jsonPath("$._links.bundles.href", Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles"))) .andExpect(jsonPath("$._links.mappedCollections.href", @@ -3893,6 +3896,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(jsonPath("$.inArchive", Matchers.is(false))) .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/items/" + item.getID().toString()))) + .andExpect(jsonPath("$._links.accessStatus.href", + Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus"))) 
.andExpect(jsonPath("$._links.bundles.href", Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles"))) .andExpect(jsonPath("$._links.mappedCollections.href", @@ -3926,6 +3931,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest { Matchers.containsString("/api/core/items/" + item.getID().toString()))) .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/items/" + item.getID().toString()))) + .andExpect(jsonPath("$._links.accessStatus.href", + Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus"))) .andExpect(jsonPath("$._links.bundles.href", Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles"))) .andExpect(jsonPath("$._links.mappedCollections.href", @@ -4376,4 +4383,35 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(status().isUnauthorized()); } + @Test + public void findAccessStatusForItemBadRequestTest() throws Exception { + getClient().perform(get("/api/core/items/{uuid}/accessStatus", "1")) + .andExpect(status().isBadRequest()); + } + + @Test + public void findAccessStatusForItemNotFoundTest() throws Exception { + UUID fakeUUID = UUID.randomUUID(); + getClient().perform(get("/api/core/items/{uuid}/accessStatus", fakeUUID)) + .andExpect(status().isNotFound()); + } + + @Test + public void findAccessStatusForItemTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection owningCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Owning Collection") + .build(); + Item item = ItemBuilder.createItem(context, owningCollection) + .withTitle("Test item") + .build(); + context.restoreAuthSystemState(); + getClient().perform(get("/api/core/items/{uuid}/accessStatus", item.getID())) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$.status", notNullValue())); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java index e326474e77..d8e53c770c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RelationshipRestRepositoryIT.java @@ -3349,4 +3349,28 @@ public class RelationshipRestRepositoryIT extends AbstractEntityIntegrationTest .andExpect(jsonPath("$.page.totalElements", is(2))); } + @Test + public void findTheCreatedRelationshipTypeTest() throws Exception { + + context.turnOffAuthorisationSystem(); + + Relationship relationship = RelationshipBuilder + .createRelationshipBuilder(context, author1, orgUnit1, isOrgUnitOfPersonRelationshipType).build(); + + context.restoreAuthSystemState(); + + Integer relationshipId = relationship.getID(); + getClient().perform(get("/api/core/relationships/" + relationshipId)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is(relationship.getID()))) + .andExpect(jsonPath("$._embedded.relationships").doesNotExist()) + .andExpect(jsonPath("$._links.relationshipType.href", + containsString("/api/core/relationships/" + relationshipId + "/relationshipType")) + ); + + String adminToken = getAuthToken(admin.getEmail(), password); + getClient(adminToken).perform(get("/api/core/relationships/" + relationshipId + "/relationshipType")) + .andExpect(status().isOk()); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResearcherProfileRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResearcherProfileRestRepositoryIT.java new file mode 100644 index 0000000000..a2889bd6aa --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResearcherProfileRestRepositoryIT.java @@ -0,0 +1,1246 @@ +/** + * The contents of this file 
are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static com.jayway.jsonpath.JsonPath.read; +import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static java.util.Arrays.asList; +import static java.util.UUID.fromString; +import static org.dspace.app.rest.matcher.HalMatcher.matchLinks; +import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataNotEmpty; +import static org.dspace.app.rest.matcher.ResourcePolicyMatcher.matchResourcePolicyProperties; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertEquals; +import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.io.UnsupportedEncodingException; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicReference; + +import com.jayway.jsonpath.JsonPath; +import 
org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.ReplaceOperation; +import org.dspace.app.rest.repository.ResearcherProfileRestRepository; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.dspace.content.service.ItemService; +import org.dspace.core.Constants; +import org.dspace.eperson.EPerson; +import org.dspace.services.ConfigurationService; +import org.dspace.util.UUIDUtils; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.MediaType; +import org.springframework.test.web.servlet.MvcResult; + +/** + * Integration tests for {@link ResearcherProfileRestRepository}. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegrationTest { + + @Autowired + private ConfigurationService configurationService; + + @Autowired + private ItemService itemService; + + private EPerson user; + + private EPerson anotherUser; + + private Collection personCollection; + + /** + * Tests setup. 
+ */ + @Override + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + user = EPersonBuilder.createEPerson(context) + .withEmail("user@example.com") + .withPassword(password) + .build(); + + anotherUser = EPersonBuilder.createEPerson(context) + .withEmail("anotherUser@example.com") + .withPassword(password) + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + personCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Profile Collection") + .withEntityType("Person") + .withSubmitterGroup(user) + .withTemplateItem() + .build(); + + configurationService.setProperty("researcher-profile.collection.uuid", personCollection.getID().toString()); + + context.setCurrentUser(user); + + context.restoreAuthSystemState(); + + } + + /** + * Verify that the findById endpoint returns the own profile. + * + * @throws Exception + */ + @Test + public void testFindById() throws Exception { + + UUID id = user.getID(); + String name = user.getFullName(); + + String authToken = getAuthToken(user.getEmail(), password); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, personCollection) + .withDspaceObjectOwner(name, id.toString()) + .build(); + + context.restoreAuthSystemState(); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is(id.toString()))) + .andExpect(jsonPath("$.visible", is(true))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("item"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id.toString(), 0))) + .andExpect(jsonPath("$.metadata", 
matchMetadata("dspace.entity.type", "Person", 0))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("eperson"))) + .andExpect(jsonPath("$.name", is(name))); + + } + + /** + * Verify that the an admin user can call the findById endpoint to get a + * profile. + * + * @throws Exception + */ + @Test + public void testFindByIdWithAdmin() throws Exception { + + UUID id = user.getID(); + String name = user.getFullName(); + + String authToken = getAuthToken(admin.getEmail(), password); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, personCollection) + .withDspaceObjectOwner(name, id.toString()) + .build(); + + context.restoreAuthSystemState(); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is(id.toString()))) + .andExpect(jsonPath("$.visible", is(true))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("item"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id.toString(), 0))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("eperson"))) + .andExpect(jsonPath("$.name", is(name))); + + } + + /** + * Verify that a standard user can't access the profile of another user. 
+ * + * @throws Exception + */ + @Test + public void testFindByIdWithoutOwnerUser() throws Exception { + + UUID id = user.getID(); + String name = user.getFullName(); + + String authToken = getAuthToken(anotherUser.getEmail(), password); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, personCollection) + .withDspaceObjectOwner(name, id.toString()) + .build(); + + context.restoreAuthSystemState(); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isForbidden()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isForbidden()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + .andExpect(status().isForbidden()); + + } + + /** + * Verify that the createAndReturn endpoint create a new researcher profile. + * + * @throws Exception + */ + @Test + public void testCreateAndReturn() throws Exception { + + String id = user.getID().toString(); + String name = user.getName(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("item"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id, 0))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + 
.andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("eperson"))) + .andExpect(jsonPath("$.name", is(name))); + + String itemId = getItemIdByProfileId(authToken, id); + Item profileItem = itemService.find(context, UUIDUtils.fromString(itemId)); + + getClient(getAuthToken(admin.getEmail(), password)) + .perform(get("/api/authz/resourcepolicies/search/resource") + .param("uuid", itemId)) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.resourcepolicies", containsInAnyOrder( + matchResourcePolicyProperties(null, user, profileItem, null, Constants.READ, null), + matchResourcePolicyProperties(null, user, profileItem, null, Constants.WRITE, null)))) + .andExpect(jsonPath("$.page.totalElements", is(2))); + + } + + @Test + public void testCreateAndReturnWithPublicProfile() throws Exception { + + configurationService.setProperty("researcher-profile.set-new-profile-visible", true); + String id = user.getID().toString(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(true))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + } + + /** + * Verify that an admin can call the createAndReturn endpoint to store a new + * researcher profile related to another user. 
+ * + * @throws Exception + */ + @Test + public void testCreateAndReturnWithAdmin() throws Exception { + + String id = user.getID().toString(); + String name = user.getName(); + + configurationService.setProperty("researcher-profile.collection.uuid", null); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson", id) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("item"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id, 0))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("eperson"))) + .andExpect(jsonPath("$.name", is(name))); + + authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + } + + @Test + public void testCreateAndReturnWithoutCollectionIdSet() throws Exception { + + String id = user.getID().toString(); + + configurationService.setProperty("researcher-profile.collection.uuid", null); + + String 
authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + String itemId = getItemIdByProfileId(authToken, id); + Item profileItem = itemService.find(context, UUIDUtils.fromString(itemId)); + assertThat(profileItem, notNullValue()); + assertThat(profileItem.getOwningCollection(), is(personCollection)); + + } + + @Test + public void testCreateAndReturnWithCollectionHavingInvalidEntityTypeSet() throws Exception { + + String id = user.getID().toString(); + + context.turnOffAuthorisationSystem(); + + Collection orgUnitCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("OrgUnit Collection") + .withEntityType("OrgUnit") + .withSubmitterGroup(user) + .withTemplateItem() + .build(); + + context.restoreAuthSystemState(); + + configurationService.setProperty("researcher-profile.collection.uuid", orgUnitCollection.getID().toString()); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson"))); + + String itemId = getItemIdByProfileId(authToken, id); + Item profileItem = itemService.find(context, UUIDUtils.fromString(itemId)); + assertThat(profileItem, notNullValue()); + assertThat(profileItem.getOwningCollection(), is(personCollection)); + + } + + /** + * Verify that a 
standard user can't call the createAndReturn endpoint to store + * a new researcher profile related to another user. + * + * @throws Exception + */ + @Test + public void testCreateAndReturnWithoutOwnUser() throws Exception { + + String authToken = getAuthToken(anotherUser.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson", user.getID().toString()) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isForbidden()); + + } + + /** + * Verify that a conflict occurs if an user that have already a profile call the + * createAndReturn endpoint. + * + * @throws Exception + */ + @Test + public void testCreateAndReturnWithProfileAlreadyAssociated() throws Exception { + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))) + .andExpect(jsonPath("$.type", is("profile"))); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isUnprocessableEntity()); + + } + + /** + * Verify that an unprocessable entity status is back when the createAndReturn + * is called to create a profile for an unknown user. + * + * @throws Exception + */ + @Test + public void testCreateAndReturnWithUnknownEPerson() throws Exception { + + String unknownId = UUID.randomUUID().toString(); + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson", unknownId) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isUnprocessableEntity()); + } + + /** + * Verify that a user can delete his profile using the delete endpoint. 
+ * + * @throws Exception + */ + @Test + public void testDelete() throws Exception { + + configurationService.setProperty("researcher-profile.hard-delete.enabled", false); + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + AtomicReference itemIdRef = new AtomicReference<>(); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", hasJsonPath("$.metadata", matchMetadataNotEmpty("dspace.object.owner")))) + .andDo(result -> itemIdRef.set(fromString(read(result.getResponse().getContentAsString(), "$.id")))); + + getClient(authToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + getClient(authToken).perform(get("/api/core/items/{id}", itemIdRef.get())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", hasJsonPath("$.metadata", matchMetadataDoesNotExist("dspace.object.owner")))); + + } + + /** + * Verify that a user can hard delete his profile using the delete endpoint. 
+ * + * @throws Exception + */ + @Test + public void testHardDelete() throws Exception { + + configurationService.setProperty("researcher-profile.hard-delete.enabled", true); + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + AtomicReference itemIdRef = new AtomicReference<>(); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", hasJsonPath("$.metadata", matchMetadataNotEmpty("dspace.object.owner")))) + .andDo(result -> itemIdRef.set(fromString(read(result.getResponse().getContentAsString(), "$.id")))); + + getClient(authToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + getClient(authToken).perform(get("/api/core/items/{id}", itemIdRef.get())) + .andExpect(status().isNotFound()); + + } + + /** + * Verify that an admin can delete a profile of another user using the delete + * endpoint. 
+ * + * @throws Exception + */ + @Test + public void testDeleteWithAdmin() throws Exception { + + String id = user.getID().toString(); + + String adminToken = getAuthToken(admin.getEmail(), password); + String userToken = getAuthToken(user.getEmail(), password); + + getClient(userToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(adminToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + getClient(adminToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + } + + /** + * Verify that an user can delete his profile using the delete endpoint even if + * was created by an admin. + * + * @throws Exception + */ + @Test + public void testDeleteProfileCreatedByAnAdmin() throws Exception { + + String id = user.getID().toString(); + + String adminToken = getAuthToken(admin.getEmail(), password); + String userToken = getAuthToken(user.getEmail(), password); + + getClient(adminToken).perform(post("/api/eperson/profiles/") + .param("eperson", id) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(adminToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(userToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + getClient(adminToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + } + + /** + * Verify that a standard user can't call the delete endpoint to delete a + * researcher profile related to another user. 
+ * + * @throws Exception + */ + @Test + public void testDeleteWithoutOwnUser() throws Exception { + + String id = user.getID().toString(); + + String userToken = getAuthToken(user.getEmail(), password); + String anotherUserToken = getAuthToken(anotherUser.getEmail(), password); + + getClient(userToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(anotherUserToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isForbidden()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + } + + /** + * Verify that an user can change the profile visibility using the patch endpoint. + * + * @throws Exception + */ + @Test + public void testPatchToChangeVisibleAttribute() throws Exception { + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.visible", is(false))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(false))); + + String itemId = getItemIdByProfileId(authToken, id); + + getClient().perform(get("/api/core/items/{id}", itemId)) + .andExpect(status().isUnauthorized()); + + // change the visibility to true + List operations = asList(new ReplaceOperation("/visible", true)); + + getClient(authToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + 
.andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + + getClient().perform(get("/api/core/items/{id}", itemId)) + .andExpect(status().isOk()); + + // change the visibility to false + operations = asList(new ReplaceOperation("/visible", false)); + + getClient(authToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(false))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(false))); + + getClient().perform(get("/api/core/items/{id}", itemId)) + .andExpect(status().isUnauthorized()); + + } + + /** + * Verify that an user can not change the profile visibility of another user + * using the patch endpoint. + * + * @throws Exception + */ + @Test + public void testPatchToChangeVisibleAttributeWithoutOwnUser() throws Exception { + + String id = user.getID().toString(); + + String userToken = getAuthToken(user.getEmail(), password); + String anotherUserToken = getAuthToken(anotherUser.getEmail(), password); + + getClient(userToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.visible", is(false))); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + // try to change the visibility to true + List operations = asList(new ReplaceOperation("/visible", true)); + + getClient(anotherUserToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isForbidden()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(false))); + } + + /** + * Verify that an admin can 
change the profile visibility of another user using + * the patch endpoint. + * + * @throws Exception + */ + @Test + public void testPatchToChangeVisibleAttributeWithAdmin() throws Exception { + + String id = user.getID().toString(); + + String adminToken = getAuthToken(admin.getEmail(), password); + String userToken = getAuthToken(user.getEmail(), password); + + getClient(userToken).perform(post("/api/eperson/profiles/") + .param("eperson", id) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + // change the visibility to true + List operations = asList(new ReplaceOperation("/visible", true)); + + getClient(adminToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + } + + /** + * Verify that an user can change the visibility of his profile using the patch + * endpoint even if was created by an admin. 
+ * + * @throws Exception + */ + @Test + public void testPatchToChangeVisibilityOfProfileCreatedByAnAdmin() throws Exception { + + String id = user.getID().toString(); + + String adminToken = getAuthToken(admin.getEmail(), password); + String userToken = getAuthToken(user.getEmail(), password); + + getClient(adminToken).perform(post("/api/eperson/profiles/") + .param("eperson", id) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(adminToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + // change the visibility to true + List operations = asList(new ReplaceOperation("/visible", true)); + + getClient(userToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.visible", is(true))); + } + + @Test + public void testPatchToChangeVisibleAttributeOfNotExistProfile() throws Exception { + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.visible", is(false))); + + getClient(authToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + List operations = asList(new ReplaceOperation("/visible", true)); + + getClient(authToken).perform(patch("/api/eperson/profiles/{id}", id) + .content(getPatchContent(operations)) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isNotFound()); + } + + /** + * Verify that after an user login an automatic claim between the logged eperson + * and possible profiles without eperson is done. 
+ * + * @throws Exception + */ + @Test + public void testAutomaticProfileClaimByEmail() throws Exception { + + String id = user.getID().toString(); + + String adminToken = getAuthToken(admin.getEmail(), password); + + // create and delete a profile + getClient(adminToken).perform(post("/api/eperson/profiles/") + .param("eperson", id) + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + String firstItemId = getItemIdByProfileId(adminToken, id); + + getClient(adminToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + + getClient(adminToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + // the automatic claim is done after the user login + String userToken = getAuthToken(user.getEmail(), password); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + // the profile item should be the same + String secondItemId = getItemIdByProfileId(adminToken, id); + assertEquals("The item should be the same", firstItemId, secondItemId); + + } + + @Test + public void testAutomaticProfileClaimByEmailWithRegularEntity() throws Exception { + + String userToken = getAuthToken(user.getEmail(), password); + + context.turnOffAuthorisationSystem(); + + Item itemToBeClaimed = ItemBuilder.createItem(context, personCollection) + .withPersonEmail(user.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + String id = user.getID().toString(); + + getClient(userToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNotFound()); + + // the automatic claim is done after the user login + String newUserToken = getAuthToken(user.getEmail(), password); + + getClient(newUserToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + // the profile item should be the same + String firstItemId = itemToBeClaimed.getID().toString(); + String secondItemId = 
getItemIdByProfileId(newUserToken, id); + assertEquals("The item should be the same", firstItemId, secondItemId); + + } + + @Test + public void testNoAutomaticProfileClaimOccursIfManyClaimableItemsAreFound() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withNameInMetadata("Test", "User") + .withPassword(password) + .withEmail("test@email.it") + .build(); + + ItemBuilder.createItem(context, personCollection) + .withPersonEmail("test@email.it") + .build(); + + ItemBuilder.createItem(context, personCollection) + .withPersonEmail("test@email.it") + .build(); + + context.restoreAuthSystemState(); + + String epersonId = ePerson.getID().toString(); + + getClient(getAuthToken(ePerson.getEmail(), password)) + .perform(get("/api/eperson/profiles/{id}", epersonId)) + .andExpect(status().isNotFound()); + + } + + @Test + public void testNoAutomaticProfileClaimOccursIfItemHasNotAnEmail() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withNameInMetadata("Test", "User") + .withPassword(password) + .withEmail("test@email.it") + .build(); + + ItemBuilder.createItem(context, personCollection) + .withPersonIdentifierFirstName("Test") + .withPersonIdentifierLastName("User") + .build(); + + context.restoreAuthSystemState(); + + String epersonId = ePerson.getID().toString(); + + getClient(getAuthToken(ePerson.getEmail(), password)) + .perform(get("/api/eperson/profiles/{id}", epersonId)) + .andExpect(status().isNotFound()); + + } + + @Test + public void testNoAutomaticProfileClaimOccursIfTheUserHasAlreadyAProfile() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withNameInMetadata("Test", "User") + .withPassword(password) + .withEmail("test@email.it") + .build(); + + 
context.restoreAuthSystemState(); + + String epersonId = ePerson.getID().toString(); + + String token = getAuthToken(ePerson.getEmail(), password); + + getClient(token).perform(post("/api/eperson/profiles/") + .contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()); + + getClient(token).perform(get("/api/eperson/profiles/{id}", epersonId)) + .andExpect(status().isOk()); + + String profileItemId = getItemIdByProfileId(token, epersonId); + + context.turnOffAuthorisationSystem(); + + ItemBuilder.createItem(context, personCollection) + .withPersonEmail("test@email.it") + .build(); + + context.restoreAuthSystemState(); + + token = getAuthToken(ePerson.getEmail(), password); + + String newProfileItemId = getItemIdByProfileId(token, epersonId); + assertEquals("The item should be the same", newProfileItemId, profileItemId); + + } + + @Test + public void testNoAutomaticProfileClaimOccursIfTheFoundProfileIsAlreadyClaimed() throws Exception { + + context.turnOffAuthorisationSystem(); + + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withNameInMetadata("Test", "User") + .withPassword(password) + .withEmail("test@email.it") + .build(); + + ItemBuilder.createItem(context, personCollection) + .withTitle("Admin User") + .withPersonEmail("test@email.it") + .withDspaceObjectOwner("Admin User", admin.getID().toString()) + .build(); + + context.restoreAuthSystemState(); + + String epersonId = ePerson.getID().toString(); + + String token = getAuthToken(ePerson.getEmail(), password); + + getClient(token).perform(get("/api/eperson/profiles/{id}", epersonId)) + .andExpect(status().isNotFound()); + + } + + @Test + public void researcherProfileClaim() throws Exception { + String id = user.getID().toString(); + String name = user.getName(); + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .withPersonEmail(user.getEmail()) + .build(); + 
+ final Item otherPerson = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 2") + .withPersonEmail(user.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isCreated()) + .andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.type", is("profile"))) + .andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + user.getID(), + "item", "eperson"))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}", id)) + .andExpect(status().isOk()); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("item"))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.object.owner", name, id, 0))) + .andExpect(jsonPath("$.metadata", matchMetadata("dspace.entity.type", "Person", 0))); + + getClient(authToken).perform(get("/api/eperson/profiles/{id}/eperson", id)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("eperson"))) + .andExpect(jsonPath("$.name", is(name))); + + // trying to claim another profile + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + otherPerson.getID().toString())) + .andExpect(status().isUnprocessableEntity()); + + // other person trying to claim same profile + context.turnOffAuthorisationSystem(); + EPerson ePerson = EPersonBuilder.createEPerson(context) + .withCanLogin(true) + .withEmail("foo@bar.baz") + .withPassword(password) + .withNameInMetadata("Test", "User") + .build(); + + context.restoreAuthSystemState(); + + final String ePersonToken = getAuthToken(ePerson.getEmail(), password); + + 
getClient(ePersonToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isBadRequest()); + + getClient(authToken).perform(delete("/api/eperson/profiles/{id}", id)) + .andExpect(status().isNoContent()); + } + + @Test + public void researcherProfileClaimWithoutEmail() throws Exception { + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isBadRequest()); + } + + @Test + public void researcherProfileClaimWithDifferentEmail() throws Exception { + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .withPersonEmail(eperson.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isBadRequest()); + } + + @Test + public void testNotAdminUserClaimProfileOfAnotherUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson" , anotherUser.getID().toString()) + .contentType(TEXT_URI_LIST) 
+ .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isForbidden()); + } + + @Test + public void testAdminUserClaimProfileOfNotExistingPersonId() throws Exception { + + String id = "bef23ba3-9aeb-4f7b-b153-77b0f1fc3612"; + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson" , id) + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void testAdminUserClaimProfileOfWrongPersonId() throws Exception { + + String id = "invalid_id"; + + context.turnOffAuthorisationSystem(); + + final Item person = ItemBuilder.createItem(context, personCollection) + .withTitle("Test User 1") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(admin.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .param("eperson" , id) + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + person.getID().toString())) + .andExpect(status().isBadRequest()); + } + + @Test + public void claimForNotAllowedEntityType() throws Exception { + context.turnOffAuthorisationSystem(); + + final Collection publications = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Publication") + .build(); + + final Item publication = ItemBuilder.createItem(context, publications) + .withTitle("title") + .build(); + + context.restoreAuthSystemState(); + + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/") + .contentType(TEXT_URI_LIST) + 
.content("http://localhost:8080/server/api/core/items/" + publication.getID().toString())) + .andExpect(status().isBadRequest()); + } + + @Test + public void testCloneFromExternalProfileAlreadyAssociated() throws Exception { + + String id = user.getID().toString(); + String authToken = getAuthToken(user.getEmail(), password); + + getClient(authToken).perform(post("/api/eperson/profiles/").contentType(MediaType.APPLICATION_JSON_VALUE)) + .andExpect(status().isCreated()).andExpect(jsonPath("$.id", is(id))) + .andExpect(jsonPath("$.visible", is(false))).andExpect(jsonPath("$.type", is("profile"))); + + getClient(authToken) + .perform(post("/api/eperson/profiles/").contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/items/" + id)) + .andExpect(status().isUnprocessableEntity()); + } + + private String getItemIdByProfileId(String token, String id) throws Exception { + MvcResult result = getClient(token).perform(get("/api/eperson/profiles/{id}/item", id)) + .andExpect(status().isOk()) + .andReturn(); + + return readAttributeFromResponse(result, "$.id"); + } + + private T readAttributeFromResponse(MvcResult result, String attribute) throws UnsupportedEncodingException { + return JsonPath.read(result.getResponse().getContentAsString(), attribute); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResourcePolicyRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResourcePolicyRestRepositoryIT.java index 60d69bf455..0283460711 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResourcePolicyRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ResourcePolicyRestRepositoryIT.java @@ -11,10 +11,13 @@ import static com.jayway.jsonpath.JsonPath.read; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static 
org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST_VALUE; +import static org.springframework.http.MediaType.parseMediaType; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; @@ -29,6 +32,7 @@ import java.util.concurrent.atomic.AtomicReference; import javax.ws.rs.core.MediaType; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.matcher.ResourcePolicyMatcher; import org.dspace.app.rest.model.ResourcePolicyRest; import org.dspace.app.rest.model.patch.AddOperation; @@ -2272,6 +2276,474 @@ public class ResourcePolicyRestRepositoryIT extends AbstractControllerIntegratio hasJsonPath("$.name", is(name))))); } + @Test + public void patchAddActionTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item myItem = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(myItem) + .withUser(eperson) + .withName("My Name") + .build(); + + 
context.restoreAuthSystemState(); + + //Patch with a write action + String action = Constants.actionText[1]; + List ops = new ArrayList(); + AddOperation addOperation = new AddOperation("/action", action); + ops.add(addOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(action))))); + + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(action))))); + } + + @Test + public void patchReplaceActionTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item myItem = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(myItem) + .withUser(eperson) + .withName("My name") + .withPolicyType(ResourcePolicy.TYPE_SUBMISSION) + .build(); + + context.restoreAuthSystemState(); + + String newAction = Constants.actionText[1]; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/action", newAction); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + 
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(newAction))))); + + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(newAction))))); + } + + @Test + public void patchReplaceActionUnauthenticatedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community).build(); + + Item item = ItemBuilder.createItem(context, collection).build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.WRITE) + .withDspaceObject(item) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + int newAction = Constants.ADD; + + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/action", newAction); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + getClient().perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()]))))); + } + + @Test + public void patchReplaceActionForbiddenTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = 
CollectionBuilder.createCollection(context, community).build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(item) + .withUser(eperson) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + int newAction = Constants.WRITE; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/action", newAction); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void patchReplaceActionNotFoundTest() throws Exception { + int action = Constants.WRITE; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/policyType", action); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + Integer.MAX_VALUE) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNotFound()); + } + + @Test + public void patchReplaceActionUnprocessableEntityTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = 
ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(publicItem1) + .withUser(eperson) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + int newAction = -1; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/action", newAction); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void patchAddPolicyTypeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item myItem = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(myItem) + .withUser(eperson) + .withName("My Name") + .build(); + + context.restoreAuthSystemState(); + + String policyType = ResourcePolicy.TYPE_CUSTOM; + List ops = new ArrayList(); + AddOperation addOperation = new AddOperation("/policyType", policyType); + ops.add(addOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.policyType", is(policyType)), + hasJsonPath("$.action", 
is(Constants.actionText[resourcePolicy.getAction()]))))); + + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()])), + hasJsonPath("$.policyType", is(policyType))))); + } + + @Test + public void patchRemovePolicyTypeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item myItem = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(myItem) + .withUser(eperson) + .withName("My Name") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + List ops = new ArrayList(); + RemoveOperation removeOperation = new RemoveOperation("/policyType"); + ops.add(removeOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()]))))) + .andExpect(jsonPath("$.policyType").doesNotExist()); + + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()]))))) + .andExpect(jsonPath("$.policyType").doesNotExist()); + } + + 
@Test + public void patchReplacePolicyTypeTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item myItem = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(myItem) + .withUser(eperson) + .withName("My name") + .withPolicyType(ResourcePolicy.TYPE_SUBMISSION) + .build(); + + context.restoreAuthSystemState(); + + String newPolicyType = ResourcePolicy.TYPE_CUSTOM; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/policyType", newPolicyType); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.policyType", is(newPolicyType)), + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()]))))); + + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()])), + hasJsonPath("$.policyType", is(newPolicyType))))); + } + + @Test + public void patchReplacePolicyTypeUnauthenticatedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community).build(); + + 
Item item = ItemBuilder.createItem(context, collection).build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.WRITE) + .withDspaceObject(item) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + String newPolicyType = ResourcePolicy.TYPE_SUBMISSION; + + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/policyType", newPolicyType); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + getClient().perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.action", is(Constants.actionText[resourcePolicy.getAction()])), + hasJsonPath("$.policyType", is(resourcePolicy.getRpType()))))); + } + + @Test + public void patchReplacePolicyTypeForbiddenTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community).build(); + + Item item = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(item) + .withUser(eperson) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + String newPolicyType = ResourcePolicy.TYPE_SUBMISSION; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/policyType", newPolicyType); + 
ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void patchRemovePolicyTypeNotFoundTest() throws Exception { + List ops = new ArrayList(); + RemoveOperation removeOperation = new RemoveOperation("/policyType"); + ops.add(removeOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(admin.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + Integer.MAX_VALUE) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNotFound()); + } + + @Test + public void patchReplacePolicyTypeBadRequestTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).build(); + + Collection collection = CollectionBuilder.createCollection(context, community) + .withAdminGroup(eperson) + .build(); + + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Public item") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.READ) + .withDspaceObject(publicItem1) + .withUser(eperson) + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + + String newPolicyType = ""; + List ops = new ArrayList(); + ReplaceOperation replaceOperation = new ReplaceOperation("/policyType", newPolicyType); + ops.add(replaceOperation); + String patchBody = getPatchContent(ops); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(patch("/api/authz/resourcepolicies/" + resourcePolicy.getID()) + .content(patchBody) + 
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + } + @Test public void patchAddNameBadRequestTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -2723,4 +3195,495 @@ public class ResourcePolicyRestRepositoryIT extends AbstractControllerIntegratio .andExpect(jsonPath("$._links.resource.href", Matchers.allOf( Matchers.containsString("/api/authz/resourcepolicies/search/resource")))); } + + @Test + public void patchReplaceEPersonAdminTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson newEPerson = EPersonBuilder.createEPerson(context) + .withEmail("newEPerson@mail.com") + .withPassword(password) + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withUser(eperson) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + + // update eperson of the resourcePolicy + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + newEPerson.getID())) + .andExpect(status().isNoContent()); + + // verify that the resourcePolicy is related to new eperson + 
getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.eperson.id", is(newEPerson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + } + + @Test + public void patchReplaceEPersonForbiddenTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson newEPerson = EPersonBuilder.createEPerson(context) + .withEmail("newEPerson@mail.com") + .withPassword(password) + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withUser(eperson) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + String tokenEPerson = getAuthToken(eperson.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + + // try to update eperson of resourcepolicy with normal user + getClient(tokenEPerson).perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + newEPerson.getID())) + .andExpect(status().isForbidden()); + + // verify that resourcepolicy hasn't been changed + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + } + + @Test + public void patchReplaceEPersonUnauthorizedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson newEPerson = EPersonBuilder.createEPerson(context) + .withEmail("newEPerson@mail.com") + .withPassword(password) + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withUser(eperson) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + + // try to update eperson of resourcepolicy with anonymous user + getClient().perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + newEPerson.getID())) + .andExpect(status().isUnauthorized()); + + // verify that resourcepolicy hasn't been changed + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.eperson.id", is(eperson.getID().toString()))) + .andExpect(jsonPath("$._embedded.group", nullValue())); + } + + @Test + public void patchReplaceGroupAdminTest() throws Exception { + 
context.turnOffAuthorisationSystem(); + + Group originGroup = GroupBuilder.createGroup(context) + .withName("origin Test Group") + .build(); + + Group newGroup = GroupBuilder.createGroup(context) + .withName("testGroupName") + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withGroup(originGroup) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(originGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + + // update group of the resourcePolicy + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + newGroup.getID())) + .andExpect(status().isNoContent()); + + // verify that the resourcePolicy is related to new group + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(newGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + } + + @Test + public void patchReplaceGroupForbiddenTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group originGroup = GroupBuilder.createGroup(context) + .withName("origin Test Group") + .build(); + + Group newGroup = 
GroupBuilder.createGroup(context) + .withName("testGroupName") + .build(); + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withGroup(originGroup) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + String tokenEPerson = getAuthToken(eperson.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(originGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + + // try to update group of resourcepolicy with normal user + getClient(tokenEPerson).perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + newGroup.getID())) + .andExpect(status().isForbidden()); + + // verify that resourcepolicy hasn't been changed + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(originGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + } + + @Test + public void patchReplaceGroupUnauthorizedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group originGroup = GroupBuilder.createGroup(context) + .withName("origin Test Group") + .build(); + + Group newGroup = GroupBuilder.createGroup(context) + .withName("testGroupName") + .build(); + 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Collection col = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + ResourcePolicy resourcePolicy = ResourcePolicyBuilder.createResourcePolicy(context) + .withAction(Constants.ADD) + .withDspaceObject(col) + .withGroup(originGroup) + .withDescription("My Description") + .withPolicyType(ResourcePolicy.TYPE_CUSTOM) + .build(); + + context.restoreAuthSystemState(); + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + // verify origin resourcepolicy + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(originGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + + // try to update group of resourcepolicy with anonymous user + getClient().perform(put("/api/authz/resourcepolicies/" + resourcePolicy.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + newGroup.getID())) + .andExpect(status().isUnauthorized()); + + // verify that resourcepolicy hasn't been changed + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/" + resourcePolicy.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.group.id", is(originGroup.getID().toString()))) + .andExpect(jsonPath("$._embedded.eperson", nullValue())); + } + + @Test + public void updateResourcePolicyOfEPersonToGroupTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("My group") + .build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("My community") + .build(); + + ResourcePolicy resourcePolicyOfEPerson = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + 
.withAction(Constants.READ) + .withUser(eperson) + .build(); + context.restoreAuthSystemState(); + + String authToken = getAuthToken(eperson.getEmail(), password); + getClient(authToken).perform(get("/api/authz/resourcepolicies/" + resourcePolicyOfEPerson.getID())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$", is(ResourcePolicyMatcher.matchResourcePolicy(resourcePolicyOfEPerson)))) + .andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/authz/resourcepolicies/" + + resourcePolicyOfEPerson.getID()))); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + resourcePolicyOfEPerson.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + group.getID())) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void updateResourcePolicyOfGroupToEPersonTest() throws Exception { + context.turnOffAuthorisationSystem(); + Group group = GroupBuilder.createGroup(context) + .withName("My group") + .build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("My community") + .build(); + + ResourcePolicy resourcePolicyOfGroup = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + .withAction(Constants.ADD) + .withGroup(group).build(); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(get("/api/authz/resourcepolicies/search/group") + .param("uuid", group.getID().toString())) + .andExpect(status().isOk()) + .andExpect(content().contentType(contentType)) + .andExpect(jsonPath("$._embedded.resourcepolicies", Matchers.containsInAnyOrder( + ResourcePolicyMatcher.matchResourcePolicy(resourcePolicyOfGroup)))) + .andExpect(jsonPath("$._links.self.href", Matchers.containsString( + "api/authz/resourcepolicies/search/group"))) 
+ .andExpect(jsonPath("$.page.totalElements", is(1))); + + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + resourcePolicyOfGroup.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + eperson.getID())) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void updateEPersonOfNotExistingResourcePolicyTest() throws Exception { + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + Integer.MAX_VALUE + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + eperson.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void updateGroupOfNotExistingResourcePolicyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("My group") + .build(); + + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + Integer.MAX_VALUE + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + group.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void updateResourcePolicyOfGroupWithEmptyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group = GroupBuilder.createGroup(context) + .withName("My group") + .build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("My community") + .build(); + + + ResourcePolicy resourcePolicyOfGroup = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + .withAction(Constants.ADD) + .withGroup(group).build(); + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + 
resourcePolicyOfGroup.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content(StringUtils.EMPTY)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void updateResourcePolicyOfGroupWithMultipleGroupsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Group group1 = GroupBuilder.createGroup(context).withName("My group").build(); + Group group2 = GroupBuilder.createGroup(context).withName("My group2").build(); + + Community community = CommunityBuilder.createCommunity(context) + .withName("My community") + .build(); + + ResourcePolicy resourcePolicyOfGroup = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + .withAction(Constants.ADD) + .withGroup(group1).build(); + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + resourcePolicyOfGroup.getID() + "/group") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/groups/" + group1.getID() + + "\n/api/eperson/groups/" + group2.getID())) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void updateResourcePolicyOfEPersonWithEmptyTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).withName("My community").build(); + + ResourcePolicy rpOfEPerson = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + .withAction(Constants.READ) + .withUser(eperson) + .build(); + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + rpOfEPerson.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content(StringUtils.EMPTY)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void 
updateResourcePolicyOfEPersonWithMultipleEPersonsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + EPerson eperson1 = EPersonBuilder.createEPerson(context) + .withEmail("eperson1@mail.com") + .withPassword(password) + .build(); + EPerson eperson2 = EPersonBuilder.createEPerson(context) + .withEmail("eperson2@mail.com") + .withPassword(password) + .build(); + + Community community = CommunityBuilder.createCommunity(context).withName("My community").build(); + + ResourcePolicy rpOfEPerson = ResourcePolicyBuilder.createResourcePolicy(context) + .withDspaceObject(community) + .withAction(Constants.READ) + .withUser(eperson) + .build(); + context.restoreAuthSystemState(); + + String tokenAdmin = getAuthToken(admin.getEmail(), password); + getClient(tokenAdmin).perform(put("/api/authz/resourcepolicies/" + rpOfEPerson.getID() + "/eperson") + .contentType(parseMediaType(TEXT_URI_LIST_VALUE)) + .content("/api/eperson/epersons/" + eperson1.getID() + + "\n/api/eperson/epersons/" + eperson2.getID())) + .andExpect(status().isUnprocessableEntity()); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScieloImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScieloImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..aafc75a065 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScieloImportMetadataSourceServiceIT.java @@ -0,0 +1,236 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import 
java.util.Collection; +import java.util.List; +import javax.el.MethodNotFoundException; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.datamodel.Query; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.scielo.service.ScieloImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link ScieloImportMetadataSourceServiceImpl} + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class ScieloImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Autowired + private ScieloImportMetadataSourceServiceImpl scieloServiceImpl; + + @Test + public void scieloImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream scieloResp = getClass().getResourceAsStream("scielo-test.txt")) { + + String scieloRipResp = IOUtils.toString(scieloResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(scieloRipResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + 
context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = scieloServiceImpl.getRecords("test query", 0, 2); + assertEquals(2, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void scieloImportMetadataGetRecordsCountTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream file = getClass().getResourceAsStream("scielo-test.txt")) { + String scieloResp = IOUtils.toString(file, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(scieloResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + int tot = scieloServiceImpl.getRecordsCount("test query"); + assertEquals(2, tot); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test(expected = MethodNotFoundException.class) + public void scieloImportMetadataFindMatchingRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + org.dspace.content.Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, col1) + .withTitle("test item") + .withIssueDate("2021") + .build(); + context.restoreAuthSystemState(); + scieloServiceImpl.findMatchingRecords(testItem); + } + + @Test(expected = MethodNotFoundException.class) + public void scieloImportMetadataGetRecordsCountByQueryTest() throws Exception { + Query q = new Query(); + q.addParameter("query", 
"test query"); + scieloServiceImpl.getRecordsCount(q); + } + + @Test + public void scieloImportMetadataGetRecordsByIdTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream scieloResp = getClass().getResourceAsStream("scielo-single-record.txt")) { + + String scieloRipResp = IOUtils.toString(scieloResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(scieloRipResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + collection2match.remove(1); + ImportRecord record = scieloServiceImpl.getRecord("S0185-30582021000200231-mex"); + assertNotNull(record); + Collection recordsImported = Arrays.asList(record); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + //define first record + List metadatums = new ArrayList(); + MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Nova tellus"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2021"); + MetadatumDTO citation = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.19130/iifl.nt.2021.39.2.901"); + MetadatumDTO endPage = createMetadatumDTO("oaire", "citation", "endPage", "236"); + MetadatumDTO subject = createMetadatumDTO("dc", "subject", null, "Roma"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Historia"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "ritos funerarios"); + MetadatumDTO subject4 = 
createMetadatumDTO("dc", "subject", null, "inframundo"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "epitafios"); + MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "Torres Marzo, Ricardo"); + MetadatumDTO title = createMetadatumDTO("dc", "title", null, "Requena Jiménez, Miguel, Los espacios" + + " de la muerte en Roma, Madrid, Síntesis, 2021, 365 págs." + + " más bibliografía en línea, ISBN 978-84-135759-6-4."); + MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "39"); + MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "0185-3058"); + MetadatumDTO other = createMetadatumDTO("dc", "identifier", "other", "S0185-30582021000200231-mex"); + MetadatumDTO startPage = createMetadatumDTO("oaire", "citation", "startPage", "231"); + + metadatums.add(ispartof); + metadatums.add(date); + metadatums.add(citation); + metadatums.add(doi); + metadatums.add(endPage); + metadatums.add(subject); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(author); + metadatums.add(title); + metadatums.add(volume); + metadatums.add(issn); + metadatums.add(other); + metadatums.add(startPage); + + ImportRecord firstrRecord = new ImportRecord(metadatums); + + //define second record + List metadatums2 = new ArrayList(); + MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Revista de Derecho Privado"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2021"); + MetadatumDTO citation2 = createMetadatumDTO("oaire", "citation", "issue", "41"); + MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.18601/01234366.n41.14"); + MetadatumDTO endPage2 = createMetadatumDTO("oaire", "citation", "endPage", "418"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "sopravvenienza contrattuale"); + MetadatumDTO subject7 = createMetadatumDTO("dc", "subject", null, "covro"); + 
MetadatumDTO subject8 = createMetadatumDTO("dc", "subject", null, "buona fede in senso oggettivo"); + MetadatumDTO subject9 = createMetadatumDTO("dc", "subject", null, "obbligo di rinegoziare"); + MetadatumDTO subject10 = createMetadatumDTO("dc", "subject", null, "revisione del contratto"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "MAGRI, GEO"); + MetadatumDTO title2 = createMetadatumDTO("dc", "title", null, + "Rinegoziazione e revisione del contratto. Tribunale di Roma, Sez. VI, 27 agosto 2020"); + MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "0123-4366"); + MetadatumDTO other2 = createMetadatumDTO("dc", "identifier", "other", "S0123-43662021000200397-col"); + MetadatumDTO startPage2 = createMetadatumDTO("oaire", "citation", "startPage", "397"); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", + "ABSTRACT: The Tribunal of Rome imposes an obligation to renegotiate long-term contracts," + + " the balance of which has been modified by the covro pandemic. The decision establishes a" + + " general obligation for the parties to execute the contract in good faith and gives the judge" + + " the possibility of a judicial review. 
This is a long-awaited decision in doctrine which complies" + + " with the indications of the Supreme Court of Cassation expressed in its memorandum 56/2020."); + + metadatums2.add(ispartof2); + metadatums2.add(date2); + metadatums2.add(citation2); + metadatums2.add(doi2); + metadatums2.add(endPage2); + metadatums2.add(subject6); + metadatums2.add(subject7); + metadatums2.add(subject8); + metadatums2.add(subject9); + metadatums2.add(subject10); + metadatums2.add(author2); + metadatums2.add(title2); + metadatums2.add(issn2); + metadatums2.add(other2); + metadatums2.add(startPage2); + metadatums2.add(description); + + ImportRecord secondRecord = new ImportRecord(metadatums2); + records.add(firstrRecord); + records.add(secondRecord); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java index 7c262dbeaa..5f93411bb9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionDefinitionsControllerIT.java @@ -205,7 +205,7 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra // We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) // Match only that a section exists with a submission configuration behind - .andExpect(jsonPath("$._embedded.submissionsections", hasSize(7))) + .andExpect(jsonPath("$._embedded.submissionsections", hasSize(8))) .andExpect(jsonPath("$._embedded.submissionsections", Matchers.hasItem( allOf( @@ -257,10 +257,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( 
Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalPages", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(6))) + .andExpect(jsonPath("$.page.totalPages", is(6))) .andExpect(jsonPath("$.page.number", is(0))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -283,10 +283,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=1"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page="), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalPages", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(6))) + .andExpect(jsonPath("$.page.totalPages", is(6))) .andExpect(jsonPath("$.page.number", is(1))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -309,10 +309,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=2"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalPages", is(5))) + 
.andExpect(jsonPath("$.page.totalElements", is(6))) + .andExpect(jsonPath("$.page.totalPages", is(6))) .andExpect(jsonPath("$.page.number", is(2))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -335,10 +335,10 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra Matchers.containsString("page=3"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalPages", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(6))) + .andExpect(jsonPath("$.page.totalPages", is(6))) .andExpect(jsonPath("$.page.number", is(3))); getClient(tokenAdmin).perform(get("/api/config/submissiondefinitions") @@ -353,16 +353,18 @@ public class SubmissionDefinitionsControllerIT extends AbstractControllerIntegra .andExpect(jsonPath("$._links.prev.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=3"), Matchers.containsString("size=1")))) - .andExpect(jsonPath("$._links.next").doesNotExist()) + .andExpect(jsonPath("$._links.next.href", Matchers.allOf( + Matchers.containsString("/api/config/submissiondefinitions?"), + Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.self.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), Matchers.containsString("page=4"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$._links.last.href", Matchers.allOf( Matchers.containsString("/api/config/submissiondefinitions?"), - Matchers.containsString("page=4"), Matchers.containsString("size=1")))) + 
Matchers.containsString("page=5"), Matchers.containsString("size=1")))) .andExpect(jsonPath("$.page.size", is(1))) - .andExpect(jsonPath("$.page.totalElements", is(5))) - .andExpect(jsonPath("$.page.totalPages", is(5))) + .andExpect(jsonPath("$.page.totalElements", is(6))) + .andExpect(jsonPath("$.page.totalPages", is(6))) .andExpect(jsonPath("$.page.number", is(4))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java index 6a83401f93..241bdefe21 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SubmissionFormsControllerIT.java @@ -11,6 +11,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -66,13 +67,13 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(content().contentType(contentType)) //The configuration file for the test env includes 6 forms .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect( jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - //The array of submissionforms should have a size of 6 - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(7)))) + //The array of submissionforms should 
have a size of 8 + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))) ; } @@ -83,12 +84,12 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(status().isOk()) .andExpect(content().contentType(contentType)) .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(1))) .andExpect(jsonPath("$.page.number", is(0))) .andExpect(jsonPath("$._links.self.href", Matchers.startsWith(REST_SERVER_URL + "config/submissionforms"))) - .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(7)))); + .andExpect(jsonPath("$._embedded.submissionforms", hasSize(equalTo(8)))); } @Test @@ -113,20 +114,20 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone"))) // check the first two rows .andExpect(jsonPath("$.rows[0].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", + SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", null, null, true,"Add an author", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", + SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", null, "You must enter a main title for this item.", false, "Enter the main title of the item.", "dc.title")))) // check a row with multiple fields .andExpect(jsonPath("$.rows[3].fields", contains( SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue", - "You must enter at least the year.", false, + null, "You must enter at least the year.", false, "Please give the date", "col-sm-4", "dc.date.issued"), - SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", + 
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", null, null, false,"Enter the name of", "col-sm-8","dc.publisher")))) ; @@ -144,18 +145,18 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone"))) .andExpect(jsonPath("$.rows[0].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", + SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", null, null, true,"Add an author", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", + SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", null, "You must enter a main title for this item.", false, "Enter the main title of the item.", "dc.title")))) .andExpect(jsonPath("$.rows[3].fields",contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue", + SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue", null, "You must enter at least the year.", false, "Please give the date", "col-sm-4", "dc.date.issued"), - SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", + SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", null, null, false,"Enter the name of", "col-sm-8","dc.publisher")))); } @@ -220,20 +221,20 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe // dc.subject fields with in separate rows all linked to an authority with different // presentation modes (suggestion, name-lookup, lookup) .andExpect(jsonPath("$.rows[0].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Author", + SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Author", null, null, true, "Author field that can be associated with an authority 
providing suggestion", null, "dc.contributor.author", "SolrAuthorAuthority") ))) .andExpect(jsonPath("$.rows[1].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup-name", "Editor", + SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup-name", "Editor", null, null, false, "Editor field that can be associated with an authority " + "providing the special name lookup", null, "dc.contributor.editor", "SolrEditorAuthority") ))) .andExpect(jsonPath("$.rows[2].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup", "Subject", + SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup", "Subject", null, null, true, "Subject field that can be associated with an authority providing lookup", null, "dc.subject", "SolrSubjectAuthority") @@ -266,7 +267,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone"))) // our test configuration include the dc.type field with a value pair in the 8th row .andExpect(jsonPath("$.rows[7].fields", contains( - SubmissionFormFieldMatcher.matchFormFieldDefinition("dropdown", "Type", + SubmissionFormFieldMatcher.matchFormFieldDefinition("dropdown", "Type", null, null, true, "Select the type(s) of content of the item. 
To select more than one value in the " + "list, you may have to hold down the \"CTRL\" or \"Shift\" key.", @@ -275,6 +276,35 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe ; } + @Test + public void findFieldWithTypeBindConfig() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + + getClient(token).perform(get("/api/config/submissionforms/traditionalpageone")) + // The status has to be 200 OK + .andExpect(status().isOk()) + // We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + // Check that the JSON root matches the expected "traditionalpageone" input forms + .andExpect(jsonPath("$.id", is("traditionalpageone"))) + .andExpect(jsonPath("$.name", is("traditionalpageone"))) + .andExpect(jsonPath("$.type", is("submissionform"))) + .andExpect(jsonPath("$._links.self.href", Matchers + .startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone"))) + // check a row with type-bind 'Technical Report' + .andExpect(jsonPath("$.rows[5].fields", contains( + SubmissionFormFieldMatcher.matchFormFieldDefinition("series", "Series/Report No.", + "Technical Report", null, true, + "Enter the series and number assigned to this item by your community.", + "dc.relation.ispartofseries")))) + // check the same row with a NON-matching type-bind 'Article' (expect false) + .andExpect(((jsonPath("$.rows[5].fields", not(contains( + SubmissionFormFieldMatcher.matchFormFieldDefinition("series", "Series/Report No.", + "Article", null, true, + "Enter the series and number assigned to this item by your community.", + "dc.relation.ispartofseries"))))))); + } + @Test public void findOpenRelationshipConfig() throws Exception { String token = getAuthToken(admin.getEmail(), password); @@ -352,14 +382,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers 
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true, + .matchFormFieldDefinition("name", "Autore", null, + "\u00C8" + " richiesto almeno un autore", true, "Aggiungi un autore", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Titolo", + .matchFormFieldDefinition("onebox", "Titolo", null, "\u00C8" + " necessario inserire un titolo principale per questo item", false, "Inserisci titolo principale di questo item", "dc.title")))) .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("dropdown", "Lingua", null, false, + .matchFormFieldDefinition("dropdown", "Lingua", null, null, false, "Selezionare la lingua del contenuto principale dell'item." + " Se la lingua non compare nell'elenco, selezionare (Altro)." 
+ " Se il contenuto non ha davvero una lingua" @@ -376,14 +407,14 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Автор", "Потрібно ввести хочаб одного автора!", + .matchFormFieldDefinition("name", "Автор", null, "Потрібно ввести хочаб одного автора!", true, "Додати автора", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Заголовок", + .matchFormFieldDefinition("onebox", "Заголовок", null, "Заговолок файла обов'язковий !", false, "Ввести основний заголовок файла", "dc.title")))) .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("dropdown", "Мова", null, false, + .matchFormFieldDefinition("dropdown", "Мова", null, null, false, "Виберiть мову головного змiсту файлу, як що мови немає у списку, вибрати (Iнша)." 
+ " Як що вмiст вайлу не є текстовим, наприклад є фотографiєю, тодi вибрати (N/A)", null, "dc.language.iso", "common_iso_languages")))); @@ -431,14 +462,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true, + .matchFormFieldDefinition("name", "Autore", null, + "\u00C8" + " richiesto almeno un autore", true, "Aggiungi un autore", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Titolo", + .matchFormFieldDefinition("onebox", "Titolo", null, "\u00C8" + " necessario inserire un titolo principale per questo item", false, "Inserisci titolo principale di questo item", "dc.title")))) .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("dropdown", "Lingua", null, false, + .matchFormFieldDefinition("dropdown", "Lingua", null, null, false, "Selezionare la lingua del contenuto principale dell'item." + " Se la lingua non compare nell'elenco, selezionare (Altro)." 
+ " Se il contenuto non ha davvero una lingua" @@ -455,14 +487,14 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Автор", "Потрібно ввести хочаб одного автора!", + .matchFormFieldDefinition("name", "Автор", null, "Потрібно ввести хочаб одного автора!", true, "Додати автора", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Заголовок", + .matchFormFieldDefinition("onebox", "Заголовок", null, "Заговолок файла обов'язковий !", false, "Ввести основний заголовок файла", "dc.title")))) .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("dropdown", "Мова", null, false, + .matchFormFieldDefinition("dropdown", "Мова", null, null, false, "Виберiть мову головного змiсту файлу, як що мови немає у списку, вибрати (Iнша)." 
+ " Як що вмiст вайлу не є текстовим, наприклад є фотографiєю, тодi вибрати (N/A)", null, "dc.language.iso", "common_iso_languages")))); @@ -505,14 +537,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true, + .matchFormFieldDefinition("name", "Autore", null, + "\u00C8" + " richiesto almeno un autore", true, "Aggiungi un autore", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Titolo", + .matchFormFieldDefinition("onebox", "Titolo", null, "\u00C8" + " necessario inserire un titolo principale per questo item", false, "Inserisci titolo principale di questo item", "dc.title")))) .andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("dropdown", "Lingua", null, false, + .matchFormFieldDefinition("dropdown", "Lingua", null, null, false, "Selezionare la lingua del contenuto principale dell'item." + " Se la lingua non compare nell'elenco, selezionare (Altro)." 
+ " Se il contenuto non ha davvero una lingua" @@ -547,10 +580,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Autore", "\u00C8 richiesto almeno un autore", true, + .matchFormFieldDefinition("name", "Autore", null, "\u00C8 richiesto almeno un autore", true, "Aggiungi un autore", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Titolo", + .matchFormFieldDefinition("onebox", "Titolo", null, "\u00C8 necessario inserire un titolo principale per questo item", false, "Inserisci titolo principale di questo item", "dc.title")))); resetLocalesConfiguration(); @@ -582,10 +615,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe .andExpect(jsonPath("$._links.self.href", Matchers .startsWith(REST_SERVER_URL + "config/submissionforms/languagetest"))) .andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("name", "Autore", "\u00C8 richiesto almeno un autore", true, + .matchFormFieldDefinition("name", "Autore", null, "\u00C8 richiesto almeno un autore", true, "Aggiungi un autore", "dc.contributor.author")))) .andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher - .matchFormFieldDefinition("onebox", "Titolo", + .matchFormFieldDefinition("onebox", "Titolo", null, "\u00C8 necessario inserire un titolo principale per questo item", false, "Inserisci titolo principale di questo item", "dc.title")))); @@ -665,7 +698,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=3"), 
Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(4))) .andExpect(jsonPath("$.page.number", is(0))); @@ -692,7 +725,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=3"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(4))) .andExpect(jsonPath("$.page.number", is(1))); @@ -716,7 +749,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=3"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(4))) .andExpect(jsonPath("$.page.number", is(2))); @@ -739,7 +772,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe Matchers.containsString("/api/config/submissionforms?"), Matchers.containsString("page=3"), Matchers.containsString("size=2")))) .andExpect(jsonPath("$.page.size", is(2))) - .andExpect(jsonPath("$.page.totalElements", equalTo(7))) + .andExpect(jsonPath("$.page.totalElements", equalTo(8))) .andExpect(jsonPath("$.page.totalPages", equalTo(4))) .andExpect(jsonPath("$.page.number", is(3))); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/VuFindImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VuFindImportMetadataSourceServiceIT.java new file mode 
100644 index 0000000000..c3063ca234 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/VuFindImportMetadataSourceServiceIT.java @@ -0,0 +1,199 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import javax.el.MethodNotFoundException; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Item; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.vufind.VuFindImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link VuFindImportMetadataSourceServiceImpl} + * + * @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com) + */ +public class VuFindImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Autowired + private VuFindImportMetadataSourceServiceImpl vuFindService; + + @Test + public void 
vuFindImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream vuFindRespIS = getClass().getResourceAsStream("vuFind-generic.json")) { + + String vuFindResp = IOUtils.toString(vuFindRespIS, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(vuFindResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = vuFindService.getRecords("test query", 0, 2); + assertEquals(2, recordsImported.size()); + matchRecords(new ArrayList<>(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void vuFindImportMetadataGetRecordsCountTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream vuFindRespIS = getClass().getResourceAsStream("vuFind-generic.json")) { + String vuFindResp = IOUtils.toString(vuFindRespIS, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(vuFindResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + int tot = vuFindService.getRecordsCount("test query"); + assertEquals(1994, tot); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void vuFindImportMetadataGetRecordByIdTest() throws Exception { + context.turnOffAuthorisationSystem(); + CloseableHttpClient originalHttpClient = 
liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + + try (InputStream vuFindByIdResp = getClass().getResourceAsStream("vuFind-by-id.json")) { + + String vuFindResp = IOUtils.toString(vuFindByIdResp, Charset.defaultCharset()); + + liveImportClientImpl.setHttpClient(httpClient); + CloseableHttpResponse response = mockResponse(vuFindResp, 200, "OK"); + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(response); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + collection2match.remove(1); + ImportRecord recordImported = vuFindService.getRecord("653510"); + assertNotNull(recordImported); + Collection recordsImported = Arrays.asList(recordImported); + matchRecords(new ArrayList<>(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test(expected = MethodNotFoundException.class) + public void vuFindImportMetadataFindMatchingRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + org.dspace.content.Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + + Item testItem = ItemBuilder.createItem(context, col1) + .withTitle("test item") + .withIssueDate("2021") + .build(); + context.restoreAuthSystemState(); + vuFindService.findMatchingRecords(testItem); + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + //define first record + List metadatums = new ArrayList(); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "653510"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "Italian"); + MetadatumDTO title = createMetadatumDTO("dc", "title", null, + "La pianta marmorea di Roma antica: Forma urbis Romae /"); + MetadatumDTO subject = 
createMetadatumDTO("dc", "subject", null, "Rome (Italy)"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Maps"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Early works to 1800."); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Rome (Italy)"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Antiquities"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Maps."); + MetadatumDTO identifier = createMetadatumDTO("dc", "identifier", null, + "http://hdl.handle.net/20.500.12390/231"); + metadatums.add(identifierOther); + metadatums.add(language); + metadatums.add(title); + metadatums.add(identifier); + metadatums.add(subject); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + + ImportRecord firstrRecord = new ImportRecord(metadatums); + + //define second record + List metadatums2 = new ArrayList(); + MetadatumDTO identifierOther2 = createMetadatumDTO("dc", "identifier", "other", "1665326"); + MetadatumDTO language2 = createMetadatumDTO("dc", "language", "iso", "English"); + MetadatumDTO title2 = createMetadatumDTO("dc", "title", null, + "Expert frames : scientific and policy practices of Roma classification /"); + MetadatumDTO subject7 = createMetadatumDTO("dc", "subject", null, "Public opinion"); + MetadatumDTO subject8 = createMetadatumDTO("dc", "subject", null, "Europe."); + MetadatumDTO subject9 = createMetadatumDTO("dc", "subject", null, "Stereotypes (Social psychology)"); + MetadatumDTO subject10 = createMetadatumDTO("dc", "subject", null, "Romanies"); + MetadatumDTO subject11 = createMetadatumDTO("dc", "subject", null, "Public opinion."); + MetadatumDTO identifier2 = createMetadatumDTO("dc", "identifier", null, + "http://ezproxy.villanova.edu/login?URL=http://www.jstor.org/stable/10.7829/j.ctt1ggjj08"); + metadatums2.add(identifierOther2); + 
metadatums2.add(language2); + metadatums2.add(title2); + metadatums2.add(identifier2); + metadatums2.add(subject7); + metadatums2.add(subject8); + metadatums2.add(subject9); + metadatums2.add(subject10); + metadatums2.add(subject11); + + ImportRecord secondRecord = new ImportRecord(metadatums2); + records.add(firstrRecord); + records.add(secondRecord); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java index d689dd7908..0f9d8f2bb9 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/WorkspaceItemRestRepositoryIT.java @@ -18,6 +18,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.assertTrue; import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST_VALUE; import static org.springframework.http.MediaType.parseMediaType; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -373,7 +374,9 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) .withTitle("Workspace Item 1") .withIssueDate("2017-10-17") - .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withAuthor("Smith, Donald") + .withIssn("222731-0582") + .withAuthor("Doe, John") .withSubject("ExtraEntry") .build(); @@ -1936,6 +1939,303 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration ; } + @Test + /** + * Test the update of metadata for fields configured with type-bind + * + * @throws Exception + */ + public void patchUpdateMetadataWithBindTest() 
throws Exception { + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + String authToken = getAuthToken(eperson.getEmail(), password); + + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withSubmitter(eperson) + .withTitle("Workspace Item 1") + .withIssueDate("2017-10-17") + .withSubject("ExtraEntry") + .grantLicense() + .build(); + + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + + context.restoreAuthSystemState(); + + // Try to add isPartOfSeries (type bound to technical report) - this should not work and instead we'll get + // no JSON path for that field + List updateSeries = new ArrayList(); + List> seriesValues = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "New Series"); + seriesValues.add(value); + updateSeries.add(new AddOperation("/sections/traditionalpageone/dc.relation.ispartofseries", seriesValues)); + + String patchBody = getPatchContent(updateSeries); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should match an item with no series or type + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, null, null)))); + + // Verify that the metadata isn't in the workspace item + getClient(authToken).perform(get("/api/submission/workspaceitems/" 
+ witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should match an item with no series or type + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, null, null)))); + + // Set the type to Technical Report confirm it worked + List updateType = new ArrayList<>(); + List> typeValues = new ArrayList<>(); + value = new HashMap(); + value.put("value", "Technical Report"); + typeValues.add(value); + updateType.add(new AddOperation("/sections/traditionalpageone/dc.type", typeValues)); + patchBody = getPatchContent(updateType); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should now match an item with the expected type and series + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Technical Report", + null)))); + + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Technical Report", + null)))); + + // Another test, this time adding the series value should be successful and we'll see the value + patchBody = getPatchContent(updateSeries); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should match an item with the expected series and type + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, + "Technical Report", "New Series")))); + + // 
Verify that the metadata isn't in the workspace item + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should match an item with the expected series and type + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, + "Technical Report", "New Series")))); + + // One final update, to a different type, this should lose the series as we're back to a non-matching type + updateType = new ArrayList<>(); + typeValues = new ArrayList<>(); + value = new HashMap(); + value.put("value", "Article"); + typeValues.add(value); + updateType.add(new AddOperation("/sections/traditionalpageone/dc.type", typeValues)); + patchBody = getPatchContent(updateType); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should NOT match an item with the series "New Series" + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Article", + null)))); + + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Article", + null)))); + + // Submit the workspace item to complete the deposit (as there is no workflow configured) and ensure a + // successful result with no validation errors + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + witem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + } + + @Test + /** + * Test the update of metadata for fields 
configured with type-bind in the specific case where + * a field is configured more than once and could be bound to one type in a certain case, and another type + * later on (perhaps with a different label or validation rules) + * + * @throws Exception + */ + public void patchUpdateMetadataWithBindForRepeatedFieldConfigurationTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String testIsbnString = "978-3-16-148410-0"; + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1, "123456789/typebind-test") + .withName("Collection 1").build(); + String authToken = getAuthToken(eperson.getEmail(), password); + + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withSubmitter(eperson) + .withTitle("Workspace Item 1") + .withIssueDate("2017-10-17") + .withSubject("ExtraEntry") + .grantLicense() + .build(); + + //disable file upload mandatory + configurationService.setProperty("webui.submit.upload.required", false); + + context.restoreAuthSystemState(); + + // Try to add ISBN (type bound to book and book chapter) - this should not work and instead we'll get + // no JSON path for that field, because this item has no type yet + List updateOperations = new ArrayList(); + List> updateValues = new ArrayList<>(); + Map value = new HashMap(); + value.put("value", "978-3-16-148410-0"); + updateValues.add(value); + updateOperations.add(new AddOperation("/sections/typebindtest/dc.identifier.isbn", updateValues)); + + String patchBody = getPatchContent(updateOperations); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + 
.content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should match an item with no series or ISBN + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue + (witem, "typebindtest",null, "dc.identifier.isbn", null)))); + + // Verify that the metadata isn't in the workspace item + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue + (witem, "typebindtest", null, "dc.identifier.isbn", null)))); + + // Set the type to Book (which ISBN is bound to, in one of the two configurations) + List updateType = new ArrayList<>(); + List> typeValues = new ArrayList<>(); + value = new HashMap(); + value.put("value", "Book"); + typeValues.add(value); + updateType.add(new AddOperation("/sections/typebindtest/dc.type", typeValues)); + patchBody = getPatchContent(updateType); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) +// .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should now match an item with the expected type and no ISBN + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem, + "typebindtest","Book", "dc.identifier.isbn", null)))); + + // Fetch workspace item and confirm type has persisted + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) +// .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem, + "typebindtest", "Book", "dc.identifier.isbn", 
null)))); + + // Now we test that the validate process does NOT strip out ISBN metadata while it's analysing the + // Book Chapter input config, even though that won't match the current item type (Book) + patchBody = getPatchContent(updateOperations); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should now match an item with the expected type and the test ISBN + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem, "typebindtest", + "Book", "dc.identifier.isbn", testIsbnString)))); + + // Verify that the metadata is persisted + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem, "typebindtest", + "Book","dc.identifier.isbn", testIsbnString)))); + + // Switch type to "Book chapter" - this is also bound to ISBN in a second configuration of the field + // And should allow us to preserve the ISBN that was set while the type was Book + updateType = new ArrayList<>(); + typeValues = new ArrayList<>(); + value = new HashMap(); + value.put("value", "Book chapter"); + typeValues.add(value); + updateType.add(new AddOperation("/sections/typebindtest/dc.type", typeValues)); + patchBody = getPatchContent(updateType); + + getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + // Check this - we should now match an item with the expected type and no ISBN + 
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem,"typebindtest", + "Book chapter", "dc.identifier.isbn", testIsbnString)))); + + // Fetch workspace item and confirm type has persisted + getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.errors").doesNotExist()) + .andExpect(jsonPath("$", + Matchers.is(WorkspaceItemMatcher.matchItemWithTypeFieldAndValue(witem, + "typebindtest", "Book chapter", "dc.identifier.isbn", testIsbnString)))); + + // Submit the workspace item to complete the deposit (as there is no workflow configured) and ensure a + // successful result with no validation errors + getClient(authToken) + .perform(post(BASE_REST_SERVER_URL + "/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + witem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + } + @Test public void patchUpdateMetadataForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -7350,4 +7650,346 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration ))); } + public void verifyBitstreamPolicyNotDuplicatedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).withName("Com").build(); + Collection collection = CollectionBuilder.createCollection(context, community).withName("Col").build(); + + // Create item + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .withIssueDate("2019-01-01") + .grantLicense() + .build(); + + // Add a bitstream to the item + String bitstreamContent = "ThisIsSomeDummyText"; + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, StandardCharsets.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, witem.getItem(), is) + .withName("Bitstream") + 
.withMimeType("text/plain").build(); + } + + context.restoreAuthSystemState(); + + String adminToken = getAuthToken(admin.getEmail(), password); + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + // Create access condition + Map accessCondition = new HashMap<>(); + accessCondition.put("name", "openaccess"); + + // Add access condition + List addAccessCondition = new ArrayList<>(); + addAccessCondition.add(new AddOperation("/sections/upload/files/0/accessConditions/-", accessCondition)); + + String patchBody = getPatchContent(addAccessCondition); + getClient(adminToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()); + + // Deposit the item + getClient(adminToken).perform(post("/api/workflow/workflowitems") + .content("/api/submission/workspaceitems/" + witem.getID()) + .contentType(textUriContentType)) + .andExpect(status().isCreated()); + + // Bistream access policies are as expected and there are no duplicates + getClient(adminToken) + .perform(get("/api/authz/resourcepolicies/search/resource") + .param("uuid", bitstream.getID().toString())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.resourcepolicies", + Matchers.hasItems(ResourcePolicyMatcher.matchResourcePolicyProperties(anonymousGroup, null, + bitstream, ResourcePolicy.TYPE_CUSTOM, Constants.READ, "openaccess")))) + .andExpect(jsonPath("$.page.totalElements", is(1))); + } + + @Test + public void verifyUnexistingBitstreamPolicyIsDeniedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).withName("Com").build(); + Collection collection = CollectionBuilder.createCollection(context, community).withName("Col").build(); + + // Create item + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + 
.withIssueDate("2019-01-01") + .grantLicense() + .build(); + + // Add a bitstream to the item + String bitstreamContent = "ThisIsSomeDummyText"; + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, StandardCharsets.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, witem.getItem(), is) + .withName("Bitstream") + .withMimeType("text/plain").build(); + } + + context.restoreAuthSystemState(); + + String adminToken = getAuthToken(admin.getEmail(), password); + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + // Create unexisting access condition + Map accessCondition = new HashMap<>(); + accessCondition.put("name", "t3st"); + + // Add access condition + List addAccessCondition = new ArrayList<>(); + addAccessCondition.add(new AddOperation("/sections/upload/files/0/accessConditions/-", accessCondition)); + + // Entity is unprocessable + String patchBody = getPatchContent(addAccessCondition); + getClient(adminToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void verifyUnconfiguredBitstreamPolicyIsDeniedTest() throws Exception { + context.turnOffAuthorisationSystem(); + + Community community = CommunityBuilder.createCommunity(context).withName("Com").build(); + Collection collection = CollectionBuilder.createCollection(context, community).withName("Col").build(); + + // Create item + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, collection) + .withTitle("Workspace Item") + .withIssueDate("2019-01-01") + .grantLicense() + .build(); + + // Add a bitstream to the item + String bitstreamContent = "ThisIsSomeDummyText"; + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, StandardCharsets.UTF_8)) { + bitstream = BitstreamBuilder.createBitstream(context, 
witem.getItem(), is) + .withName("Bitstream") + .withMimeType("text/plain").build(); + } + + context.restoreAuthSystemState(); + + String adminToken = getAuthToken(admin.getEmail(), password); + Group anonymousGroup = groupService.findByName(context, Group.ANONYMOUS); + + // Access condition exists but it is not configured in access-conditions.xml + Map accessCondition = new HashMap<>(); + accessCondition.put("name", "networkAdministration"); + + // Add access condition + List addAccessCondition = new ArrayList<>(); + addAccessCondition.add(new AddOperation("/sections/upload/files/0/accessConditions/-", accessCondition)); + + // Entity is unprocessable + String patchBody = getPatchContent(addAccessCondition); + getClient(adminToken).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void sherpaPolicySectionCacheTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"; + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Workspace Item 1") + .withIssueDate("2021-11-21") + .withAuthor("Smith, Donald") + .withIssn("2731-0582") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + AtomicReference retrievalTime = new AtomicReference(); + AtomicReference retrievalTime2 = new AtomicReference(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + 
hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(false))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].titles[0]", + is("Nature Synthesis"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].issns[0]", + is("2731-0582"))))) + .andDo(result -> retrievalTime.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + Date date = new SimpleDateFormat(dateFormat).parse(retrievalTime.get()); + + // reload page, to verify that the retrievalTime is not changed + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(false))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].titles[0]", + is("Nature Synthesis"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].issns[0]", + is("2731-0582"))))) + .andDo(result -> retrievalTime2.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + Date date2 = new SimpleDateFormat(dateFormat).parse(retrievalTime2.get()); + + assertTrue(date.equals(date2)); + + // create a list of values to use in add operation + List operations = new ArrayList<>(); + operations.add(new RemoveOperation("/sections/sherpaPolicies/retrievalTime")); + + // empty the cache and verify the retrivatTime + String patchBody = getPatchContent(operations); + getClient(token).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + 
hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(false))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].titles[0]", + is("Nature Synthesis"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].issns[0]", + is("2731-0582"))))) + .andDo(result -> retrievalTime.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + date = new SimpleDateFormat(dateFormat).parse(retrievalTime.get()); + + assertTrue(date.after(date2)); + + // reload page, to verify that the retrievalTime is not changed + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(false))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].titles[0]", + is("Nature Synthesis"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals[0].issns[0]", + is("2731-0582"))))) + .andDo(result -> retrievalTime2.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + date2 = new SimpleDateFormat(dateFormat).parse(retrievalTime2.get()); + assertTrue(date.equals(date2)); + } + + @Test + public void sherpaPolicySectionWithWrongIssnCacheTest() throws Exception { + context.turnOffAuthorisationSystem(); + + String dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"; + + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1) + .withTitle("Workspace Item 1") + 
.withIssueDate("2021-11-21") + .withAuthor("Smith, Donald") + .withIssn("0000-0000") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + AtomicReference retrievalTime = new AtomicReference(); + AtomicReference retrievalTime2 = new AtomicReference(); + + String token = getAuthToken(eperson.getEmail(), password); + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(true))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.message", is("No results found"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals", nullValue())))) + .andDo(result -> retrievalTime.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + Date date = new SimpleDateFormat(dateFormat).parse(retrievalTime.get()); + + // reload page, to verify that the retrievalTime is not changed + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(true))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.message", is("No results found"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals", nullValue())))) + .andDo(result -> retrievalTime2.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + Date date2 = new SimpleDateFormat(dateFormat).parse(retrievalTime2.get()); + + assertTrue(date.equals(date2)); + + // create a list of values to use in add operation + List operations = new ArrayList<>(); + operations.add(new 
RemoveOperation("/sections/sherpaPolicies/retrievalTime")); + + // empty the cache and verify the retrivatTime + String patchBody = getPatchContent(operations); + getClient(token).perform(patch("/api/submission/workspaceitems/" + witem.getID()) + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(true))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.message", is("No results found"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals", nullValue())))) + .andDo(result -> retrievalTime.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + date = new SimpleDateFormat(dateFormat).parse(retrievalTime.get()); + + assertTrue(date.after(date2)); + + // reload page, to verify that the retrievalTime is not changed + getClient(token).perform(get("/api/submission/workspaceitems/" + witem.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.error", is(true))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.message", is("No results found"))))) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.sections.sherpaPolicies.sherpaResponse.journals", nullValue())))) + .andDo(result -> retrievalTime2.set(read( + result.getResponse().getContentAsString(), "$.sections.sherpaPolicies.retrievalTime"))); + + date2 = new SimpleDateFormat(dateFormat).parse(retrievalTime2.get()); + assertTrue(date.equals(date2)); + } + } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/CanClaimItemFeatureIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/CanClaimItemFeatureIT.java new 
file mode 100644 index 0000000000..f3ed67bf98 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/authorization/CanClaimItemFeatureIT.java @@ -0,0 +1,251 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.authorization; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.authorization.impl.CanClaimItemFeature; +import org.dspace.app.rest.converter.ItemConverter; +import org.dspace.app.rest.model.ItemRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.app.rest.utils.Utils; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Item; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Test of Profile Claim Authorization Feature implementation. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + */ +public class CanClaimItemFeatureIT extends AbstractControllerIntegrationTest { + + private Item profile; + + @Autowired + private ItemConverter itemConverter; + + @Autowired + private Utils utils; + + @Autowired + private AuthorizationFeatureService authorizationFeatureService; + + private AuthorizationFeature canClaimProfileFeature; + + private Collection personCollection; + + private String epersonToken; + + private String adminToken; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build(); + + personCollection = CollectionBuilder.createCollection(context, parentCommunity) + .withEntityType("Person") + .withName("claimableA") + .build(); + + epersonToken = getAuthToken(eperson.getEmail(), password); + + adminToken = getAuthToken(admin.getEmail(), password); + + context.restoreAuthSystemState(); + + canClaimProfileFeature = authorizationFeatureService.find(CanClaimItemFeature.NAME); + + } + + @Test + public void testCanClaimAProfile() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection) + .withPersonEmail(eperson.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + getClient(epersonToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").exists()) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + + } + + @Test + public void testCanClaimAProfileWithAnonymousUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection).build(); + + context.restoreAuthSystemState(); + + 
getClient().perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + } + + @Test + public void testCanClaimWithAdminUser() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection) + .withPersonEmail("myemail@test.it") + .withPersonEmail(admin.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + getClient(adminToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").exists()) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + + } + + @Test + public void testNotClaimableEntityForDifferentEmail() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection) + .withPersonEmail(eperson.getEmail()) + .build(); + + context.restoreAuthSystemState(); + + getClient(adminToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + + } + + @Test + public void testNotClaimableEntityWithoutEmail() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection) + .build(); + + context.restoreAuthSystemState(); + + getClient(adminToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + 
.andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + + } + + @Test + public void testNotClaimableEntity() throws Exception { + + context.turnOffAuthorisationSystem(); + + Collection publicationCollection = CollectionBuilder + .createCollection(context, context.reloadEntity(parentCommunity)) + .withEntityType("Publication") + .withName("notClaimable") + .build(); + + context.turnOffAuthorisationSystem(); + + Item publication = ItemBuilder.createItem(context, publicationCollection).build(); + + getClient(epersonToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(publication)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + + } + + @Test + public void testItemAlreadyInARelation() throws Exception { + + context.turnOffAuthorisationSystem(); + + Item ownedItem = ItemBuilder.createItem(context, personCollection) + .withDspaceObjectOwner("owner", "ownerAuthority") + .build(); + + context.restoreAuthSystemState(); + + getClient(epersonToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", uri(ownedItem)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + + } + + @Test + public void testUserWithProfile() throws Exception { + + context.turnOffAuthorisationSystem(); + + profile = ItemBuilder.createItem(context, personCollection) + .withPersonEmail(eperson.getEmail()) + .build(); + + ItemBuilder.createItem(context, personCollection) + .withTitle("User") + .withDspaceObjectOwner("User", eperson.getID().toString()) + .build(); + + context.restoreAuthSystemState(); + + getClient(epersonToken).perform(get("/api/authz/authorizations/search/object") + .param("uri", 
uri(profile)) + .param("feature", canClaimProfileFeature.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded").doesNotExist()) + .andExpect(jsonPath("$.page.totalElements", equalTo(0))); + } + + private String uri(Item item) { + ItemRest itemRest = itemConverter.convert(item, Projection.DEFAULT); + return utils.linkToSingleResource(itemRest, "self").getHref(); + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/health/GeoIpHealthIndicatorTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/health/GeoIpHealthIndicatorTest.java new file mode 100644 index 0000000000..d47493f23c --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/health/GeoIpHealthIndicatorTest.java @@ -0,0 +1,74 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.health; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.when; + +import java.util.Map; + +import com.maxmind.geoip2.DatabaseReader; +import org.dspace.app.rest.configuration.ActuatorConfiguration; +import org.dspace.statistics.GeoIpService; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.springframework.boot.actuate.health.Health; +import org.springframework.boot.actuate.health.Status; + +/** + * Unit tests for {@link GeoIpHealthIndicator}. 
+ * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +@RunWith(MockitoJUnitRunner.class) +public class GeoIpHealthIndicatorTest { + + @Mock + private GeoIpService geoIpService; + + @InjectMocks + private GeoIpHealthIndicator geoIpHealthIndicator; + + @Mock + private DatabaseReader databaseReader; + + @Test + public void testWithGeoIpConfiguredCorrectly() { + when(geoIpService.getDatabaseReader()).thenReturn(databaseReader); + + Health health = geoIpHealthIndicator.health(); + + assertThat(health.getStatus(), is(Status.UP)); + assertThat(health.getDetails(), anEmptyMap()); + } + + @Test + public void testWithGeoIpWrongConfiguration() { + when(geoIpService.getDatabaseReader()).thenThrow(new IllegalStateException("Missing db file")); + + Health health = geoIpHealthIndicator.health(); + + assertThat(health.getStatus(), is(ActuatorConfiguration.UP_WITH_ISSUES_STATUS)); + assertThat(health.getDetails(), is(Map.of("reason", "Missing db file"))); + } + + @Test + public void testWithUnexpectedError() { + when(geoIpService.getDatabaseReader()).thenThrow(new RuntimeException("Generic error")); + + Health health = geoIpHealthIndicator.health(); + + assertThat(health.getStatus(), is(Status.DOWN)); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/link/search/HealthIndicatorMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/link/search/HealthIndicatorMatcher.java new file mode 100644 index 0000000000..4e681c95cb --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/link/search/HealthIndicatorMatcher.java @@ -0,0 +1,46 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.link.search; + +import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static 
org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.is; + +import java.util.Map; + +import org.hamcrest.Matcher; +import org.springframework.boot.actuate.health.Status; + +/** + * Matcher for the health indicators. + * + * @author Luca Giamminonni (luca.giamminonni at 4science.it) + * + */ +public final class HealthIndicatorMatcher { + + private HealthIndicatorMatcher() { + + } + + public static Matcher matchDatabase(Status status) { + return allOf( + hasJsonPath("$.db"), + hasJsonPath("$.db.status", is(status.getCode())), + hasJsonPath("$.db.components", allOf( + match("dspaceDataSource", status, Map.of("database", "H2", "validationQuery", "isValid()")), + match("dataSource", status, Map.of("database", "H2", "validationQuery", "isValid()"))))); + } + + public static Matcher match(String name, Status status, Map details) { + return allOf( + hasJsonPath("$." + name), + hasJsonPath("$." + name + ".status", is(status.getCode())), + hasJsonPath("$." + name + ".details", is(details))); + } +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/AuthenticationStatusMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/AuthenticationStatusMatcher.java index 66d4ca19d4..1e986c2181 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/AuthenticationStatusMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/AuthenticationStatusMatcher.java @@ -27,7 +27,8 @@ public class AuthenticationStatusMatcher { */ public static Matcher matchFullEmbeds() { return matchEmbeds( - "eperson" + "eperson", + "specialGroups" ); } @@ -36,7 +37,9 @@ public class AuthenticationStatusMatcher { */ public static Matcher matchLinks() { return allOf( + //FIXME https://github.com/DSpace/DSpace/issues/8274 hasJsonPath("$._links.eperson.href", containsString("api/eperson/epersons")), - hasJsonPath("$._links.self.href", containsString("api/authn/status"))); + hasJsonPath("$._links.self.href", 
containsString("api/authn/status")), + hasJsonPath("$._links.specialGroups.href", containsString("api/authn/status/specialGroups"))); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemMatcher.java index 389b8bf492..371ad6b4b4 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/ItemMatcher.java @@ -50,6 +50,7 @@ public class ItemMatcher { */ public static Matcher matchFullEmbeds() { return matchEmbeds( + "accessStatus", "bundles[]", "mappedCollections[]", "owningCollection", @@ -65,6 +66,7 @@ public class ItemMatcher { */ public static Matcher matchLinks(UUID uuid) { return HalMatcher.matchLinks(REST_SERVER_URL + "core/items/" + uuid, + "accessStatus", "bundles", "mappedCollections", "owningCollection", diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/MetadataMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/MetadataMatcher.java index 0b6c8c972a..4a6ca96dfe 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/MetadataMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/MetadataMatcher.java @@ -15,6 +15,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import org.hamcrest.Matcher; +import org.hamcrest.Matchers; import org.hamcrest.core.StringEndsWith; /** @@ -67,6 +68,22 @@ public class MetadataMatcher { return hasJsonPath("$.['" + key + "'][" + position + "].value", is(value)); } + /** + * Gets a matcher to ensure a given value is present at a specific position in + * the list of values for a given key. + * + * @param key the metadata key. + * @param value the value that must be present. + * @param authority the authority that must be present. + * @param position the position it must be present at. 
+ * @return the matcher. + */ + public static Matcher matchMetadata(String key, String value, String authority, int position) { + Matcher hasValue = hasJsonPath("$.['" + key + "'][" + position + "].value", is(value)); + Matcher hasAuthority = hasJsonPath("$.['" + key + "'][" + position + "].authority", is(authority)); + return Matchers.allOf(hasValue, hasAuthority); + } + /** * Gets a matcher to ensure a given key is not present. * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionFormFieldMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionFormFieldMatcher.java index 773a751b9f..47f96fd136 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionFormFieldMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/SubmissionFormFieldMatcher.java @@ -10,6 +10,7 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasNoJsonPath; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -28,13 +29,15 @@ public class SubmissionFormFieldMatcher { /** * Shortcut for the - * {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, boolean, String, String, String, String)} + * {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, String, boolean, String, String, String, String)} * with a null style and vocabulary name * * @param type * the expected input type * @param label * the expected label + * @param typeBind + * the expected type-bind field(s) * @param mandatoryMessage * the expected mandatoryMessage, can be null. 
If not empty the fiedl is expected to be flagged as * mandatory @@ -46,21 +49,23 @@ public class SubmissionFormFieldMatcher { * the expected metadata * @return a Matcher for all the condition above */ - public static Matcher matchFormFieldDefinition(String type, String label, String mandatoryMessage, - boolean repeatable, + public static Matcher matchFormFieldDefinition(String type, String label, String typeBind, + String mandatoryMessage, boolean repeatable, String hints, String metadata) { - return matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, null, metadata); + return matchFormFieldDefinition(type, label, typeBind, mandatoryMessage, repeatable, hints, null, metadata); } /** * Shortcut for the - * {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, boolean, String, String, String, String)} + * {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, String, boolean, String, String, String, String)} * with a null controlled vocabulary * * @param type * the expected input type * @param label * the expected label + * @param typeBind + * the expected type-bind field(s) * @param mandatoryMessage * the expected mandatoryMessage, can be null. 
If not empty the field is expected to be flagged as * mandatory @@ -75,10 +80,10 @@ public class SubmissionFormFieldMatcher { * the expected metadata * @return a Matcher for all the condition above */ - public static Matcher matchFormFieldDefinition(String type, String label, String mandatoryMessage, - boolean repeatable, - String hints, String style, String metadata) { - return matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, style, metadata, null); + public static Matcher matchFormFieldDefinition(String type, String label, String typeBind, + String mandatoryMessage, boolean repeatable, String hints, String style, String metadata) { + return matchFormFieldDefinition(type, label, typeBind, mandatoryMessage, repeatable, hints, style, metadata, + null); } /** @@ -88,6 +93,8 @@ public class SubmissionFormFieldMatcher { * the expected input type * @param label * the expected label + * @param typeBind + * the expected type-bind field(s) * @param mandatoryMessage * the expected mandatoryMessage, can be null. If not empty the field is expected to be flagged as * mandatory @@ -100,18 +107,20 @@ public class SubmissionFormFieldMatcher { * missing * @param metadata * the expected metadata - * @param controlled vocabulary + * @param controlledVocabulary * the expected controlled vocabulary, can be null. 
If null the corresponding json path is expected to be * missing * @return a Matcher for all the condition above */ - public static Matcher matchFormFieldDefinition(String type, String label, String mandatoryMessage, - boolean repeatable, String hints, String style, - String metadata, String controlledVocabulary) { + public static Matcher matchFormFieldDefinition(String type, String label, String typeBind, + String mandatoryMessage, boolean repeatable, + String hints, String style, String metadata, + String controlledVocabulary) { return allOf( // check each field definition hasJsonPath("$.input.type", is(type)), hasJsonPath("$.label", containsString(label)), + typeBind != null ? hasJsonPath("$.typeBind", contains(typeBind)) : hasNoJsonPath("$.typeBind[0]"), hasJsonPath("$.selectableMetadata[0].metadata", is(metadata)), controlledVocabulary != null ? hasJsonPath("$.selectableMetadata[0].controlledVocabulary", is(controlledVocabulary)) : hasNoJsonPath("$.selectableMetadata[0].controlledVocabulary"), @@ -166,7 +175,7 @@ public class SubmissionFormFieldMatcher { hasJsonPath("$.selectableRelationship.filter", is(filter)), hasJsonPath("$.selectableRelationship.searchConfiguration", is(searchConfiguration)), hasJsonPath("$.selectableRelationship.nameVariants", is(String.valueOf(nameVariants))), - matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, metadata)); + matchFormFieldDefinition(type, label, null, mandatoryMessage, repeatable, hints, metadata)); } /** diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/VocabularyMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/VocabularyMatcher.java index 6e23560911..b27b97d4c7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/VocabularyMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/VocabularyMatcher.java @@ -41,4 +41,14 @@ public class VocabularyMatcher { hasJsonPath("$.type", is(type)) ); } + + 
public static Matcher matchVocabularyEntry(String display, String value, String type, + String authority) { + return allOf( + hasJsonPath("$.display", is(display)), + hasJsonPath("$.value", is(value)), + hasJsonPath("$.type", is(type)), + hasJsonPath("$.authority", is(authority)) + ); + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/WorkspaceItemMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/WorkspaceItemMatcher.java index d2d2491171..070518f7ed 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/WorkspaceItemMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/WorkspaceItemMatcher.java @@ -82,6 +82,59 @@ public class WorkspaceItemMatcher { matchLinks(witem)); } + /** + * Check that the workspace item has the expected type and series values + * (used in type bind evaluation) + * @param witem the workspace item + * @param type the dc.type value eg. Technical Report + * @param series the series value eg. 11-23 + * @return Matcher result + */ + public static Matcher matchItemWithTypeAndSeries(WorkspaceItem witem, String type, String series) { + return allOf( + // Check workspaceitem properties + matchProperties(witem), + // Check type appears or is null + type != null ? + hasJsonPath("$.sections.traditionalpageone['dc.type'][0].value", is(type)) : + hasNoJsonPath("$.sections.traditionalpageone['dc.type'][0].value"), + // Check series as it appears (for type bind testing) + series != null ? + hasJsonPath("$.sections.traditionalpageone['dc.relation.ispartofseries'][0].value", is(series)) : + hasNoJsonPath("$.sections.traditionalpageone['dc.relation.ispartofseries'][0].value"), + matchLinks(witem) + ); + } + + /** + * Check that the workspace item has the expected type and a specific field value + * (used in type bind evaluation) + * @param witem the workspace item + * @param section form section name + * @param type the dc.type value eg. 
Technical Report + * @param field the field to check eg. dc.identifier.isbn + * @param value the value to check + * @return Matcher result + */ + public static Matcher matchItemWithTypeFieldAndValue(WorkspaceItem witem, + String section, String type, String field, String value) { + String fieldJsonPath = "$.sections." + section + "['" + field + "'][0].value"; + String dcTypeJsonPath = "$.sections." + section + "['dc.type'][0].value"; + return allOf( + // Check workspaceitem properties + matchProperties(witem), + // Check type appears or is null + type != null ? + hasJsonPath(dcTypeJsonPath, is(type)) : + hasNoJsonPath(dcTypeJsonPath), + // Check ISBN as it appears (for type bind testing) + value != null ? + hasJsonPath(fieldJsonPath, is(value)) : + hasNoJsonPath(fieldJsonPath), + matchLinks(witem) + ); + } + /** * Check that the id and type are exposed * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/model/AccessStatusRestTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/model/AccessStatusRestTest.java new file mode 100644 index 0000000000..7dfe3e69e0 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/model/AccessStatusRestTest.java @@ -0,0 +1,39 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.model; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import org.dspace.access.status.DefaultAccessStatusHelper; +import org.junit.Before; +import org.junit.Test; + +/** + * Test the AccessStatusRestTest class + */ +public class AccessStatusRestTest { + + AccessStatusRest accessStatusRest; + + @Before + public void setUp() throws Exception { + accessStatusRest = new AccessStatusRest(); + } + + @Test + public void testAccessStatusIsNullBeforeStatusSet() throws 
Exception { + assertNull(accessStatusRest.getStatus()); + } + + @Test + public void testAccessStatusIsNotNullAfterStatusSet() throws Exception { + accessStatusRest.setStatus(DefaultAccessStatusHelper.UNKNOWN); + assertNotNull(accessStatusRest.getStatus()); + } +} diff --git a/dspace-server-webapp/src/test/resources/Messages_pl.properties b/dspace-server-webapp/src/test/resources/Messages_pl.properties index 9f2db2e0da..3513cdd316 100644 --- a/dspace-server-webapp/src/test/resources/Messages_pl.properties +++ b/dspace-server-webapp/src/test/resources/Messages_pl.properties @@ -11,3 +11,4 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = [PL] Ref workflow group {1}. Delete the tasks and group first if you want to remove this user. org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = [PL] The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = [PL] Cannot create group, no group name is provided +org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = [PL] Cannot delete group, the associated workflow role still has pending tasks diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-ex.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-ex.json new file mode 100644 index 0000000000..bf602c94fd --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-ex.json @@ -0,0 +1,186 @@ +{ + "responseHeader": { + "status": 0, + "QTime": 13, + "params": { + "q": "roma", + "fl": "abstract,aff,alternate_bibcode,alternate_title,arxiv_class,author,bibcode,bibgroup,bibstem,citation_count,copyright,database,doi,doctype,first_author,grant,id,indexstamp,issue,keyword,lang,orcid_pub,orcid_user,orcid_other,page,property,pub,pubdate,read_count,title,vizier,volume,year", + "start": "0", + "internal_logging_params": "X-Amzn-Trace-Id=Root=1-624bf56a-2c4cc8a73c5ebd7648dd283d", + 
"rows": "2", + "wt": "json" + } + }, + "response": { + "numFound": 9383, + "start": 0, + "docs": [ + { + "bibcode": "2016PhRvL.116f1102A", + "abstract": "On September 14, 2015 at 09:50:45 UTC the two detectors of the Laser Interferometer Gravitational-Wave Observatory simultaneously observed a transient gravitational-wave signal. The signal sweeps upwards in frequency from 35 to 250 Hz with a peak gravitational-wave strain of 1.0 ×10-21. It matches the waveform predicted by general relativity for the inspiral and merger of a pair of black holes and the ringdown of the resulting single black hole. The signal was observed with a matched-filter signal-to-noise ratio of 24 and a false alarm rate estimated to be less than 1 event per 203 000 years, equivalent to a significance greater than 5.1 σ . The source lies at a luminosity distance of 41 0-180+160 Mpc corresponding to a redshift z =0.0 9-0.04+0.03 . In the source frame, the initial black hole masses are 3 6-4+5M and 2 9-4+4M , and the final black hole mass is 6 2-4+4M , with 3. 0-0.5+0.5M c2 radiated in gravitational waves. All uncertainties define 90% credible intervals. These observations demonstrate the existence of binary stellar-mass black hole systems. This is the first direct detection of gravitational waves and the first observation of a binary black hole merger.", + "aff": [ + "LIGO, California Institute of Technology, Pasadena, California 91125, USA", + "The University of Mississippi, University, Mississippi 38677, USA", + "California State University Fullerton, Fullerton, California 92831, USA", + "LAL, Université Paris-Sud, CNRS/IN2P3, Université Paris-Saclay, Orsay, France" + ], + "alternate_bibcode": [ + "2016arXiv160203837T" + ], + "arxiv_class": [ + "gr-qc", + "astro-ph.HE" + ], + "author": [ + "Abbott, B. 
P.", + "Babak, S.", + "Di Fiore, L.", + "Virgo Collaboration" + ], + "bibstem": [ + "PhRvL", + "PhRvL.116" + ], + "copyright": "2016: authors", + "database": [ + "astronomy", + "physics" + ], + "doctype": "article", + "doi": [ + "10.1103/PhysRevLett.116.061102" + ], + "first_author": "Abbott, B. P.", + "id": "1360978", + "issue": "6", + "keyword": [ + "General Relativity and Quantum Cosmology", + "Astrophysics - High Energy Astrophysical Phenomena" + ], + "orcid_pub": [ + "-", + "-", + "-", + "-" + ], + "page": [ + "061102" + ], + "pub": "Physical Review Letters", + "pubdate": "2016-02-00", + "title": [ + "Observation of Gravitational Waves from a Binary Black Hole Merger" + ], + "volume": "116", + "year": "2016", + "orcid_user": [ + "-", + "-", + "-", + "-" + ], + "orcid_other": [ + "-", + "-", + "-", + "-" + ], + "read_count": 639, + "property": [ + "ARTICLE", + "DATA", + "EPRINT_OPENACCESS", + "ESOURCE", + "OPENACCESS", + "PUB_OPENACCESS", + "REFEREED" + ], + "citation_count": 7348, + "indexstamp": "2022-04-04T22:08:12.481Z" + }, + { + "bibcode": "2017PhRvL.119p1101A", + "abstract": "On August 17, 2017 at 12∶41:04 UTC the Advanced LIGO and Advanced Virgo gravitational-wave detectors made their first observation of a binary neutron star inspiral. The signal, GW170817, was detected with a combined signal-to-noise ratio of 32.4 and a false-alarm-rate estimate of less than one per 8.0 ×104 years . We infer the component masses of the binary to be between 0.86 and 2.26 M , in agreement with masses of known neutron stars. Restricting the component spins to the range inferred in binary neutron stars, we find the component masses to be in the range 1.17 - 1.60 M , with the total mass of the system 2.7 4-0.01+0.04M . The source was localized within a sky region of 28 deg2 (90% probability) and had a luminosity distance of 4 0-14+8 Mpc , the closest and most precisely localized gravitational-wave signal yet. 
The association with the γ -ray burst GRB 170817A, detected by Fermi-GBM 1.7 s after the coalescence, corroborates the hypothesis of a neutron star merger and provides the first direct evidence of a link between these mergers and short γ -ray bursts. Subsequent identification of transient counterparts across the electromagnetic spectrum in the same location further supports the interpretation of this event as a neutron star merger. This unprecedented joint gravitational and electromagnetic observation provides insight into astrophysics, dense matter, gravitation, and cosmology.", + "aff": [ + "The University of Mississippi, University, Mississippi 38677, USA", + "The Chinese University of Hong Kong, Shatin, NT, Hong Kong", + "Nikhef, Science Park, 1098 XG Amsterdam, Netherlands", + "INFN, Sezione di Padova, I-35131 Padova, Italy" + ], + "alternate_bibcode": [ + "2017arXiv171005832T" + ], + "arxiv_class": [ + "gr-qc", + "astro-ph.HE" + ], + "author": [ + "Abbott, B. P.", + "Babak, S.", + "Vorvick, C.", + "Wade, M." + ], + "bibstem": [ + "PhRvL", + "PhRvL.119" + ], + "copyright": "2017: authors", + "database": [ + "astronomy", + "physics" + ], + "doctype": "article", + "doi": [ + "10.1103/PhysRevLett.119.161101" + ], + "first_author": "Abbott, B. 
P.", + "id": "11492510", + "issue": "16", + "keyword": [ + "General Relativity and Quantum Cosmology", + "Astrophysics - High Energy Astrophysical Phenomena" + ], + "orcid_pub": [ + "-", + "-", + "-", + "-" + ], + "page": [ + "161101" + ], + "pub": "Physical Review Letters", + "pubdate": "2017-10-00", + "title": [ + "GW170817: Observation of Gravitational Waves from a Binary Neutron Star Inspiral" + ], + "volume": "119", + "year": "2017", + "orcid_user": [ + "-", + "-", + "-", + "-" + ], + "orcid_other": [ + "-", + "-", + "-", + "-" + ], + "read_count": 475, + "property": [ + "ARTICLE", + "DATA", + "EPRINT_OPENACCESS", + "ESOURCE", + "OPENACCESS", + "PUB_OPENACCESS", + "REFEREED" + ], + "citation_count": 5260, + "indexstamp": "2022-04-04T22:08:08.068Z" + } + ] + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-single-obj.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-single-obj.json new file mode 100644 index 0000000000..60f8b10fe3 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/ads-single-obj.json @@ -0,0 +1,103 @@ +{ + "responseHeader": { + "status": 0, + "QTime": 11, + "params": { + "q": "bibcode:2017PhRvL.119p1101A", + "fl": "abstract,aff,alternate_bibcode,alternate_title,arxiv_class,author,bibcode,bibgroup,bibstem,citation_count,copyright,database,doi,doctype,first_author,grant,id,indexstamp,issue,keyword,lang,orcid_pub,orcid_user,orcid_other,page,property,pub,pubdate,read_count,title,vizier,volume,year", + "start": "0", + "internal_logging_params": "X-Amzn-Trace-Id=Root=1-624c3c9f-330e7d086f4e7f5e762a8cdc", + "rows": "10", + "wt": "json" + } + }, + "response": { + "numFound": 1, + "start": 0, + "docs": [ + { + "bibcode": "2016PhRvL.116f1102A", + "abstract": "On September 14, 2015 at 09:50:45 UTC the two detectors of the Laser Interferometer Gravitational-Wave Observatory simultaneously observed a transient gravitational-wave signal. 
The signal sweeps upwards in frequency from 35 to 250 Hz with a peak gravitational-wave strain of 1.0 ×10-21. It matches the waveform predicted by general relativity for the inspiral and merger of a pair of black holes and the ringdown of the resulting single black hole. The signal was observed with a matched-filter signal-to-noise ratio of 24 and a false alarm rate estimated to be less than 1 event per 203 000 years, equivalent to a significance greater than 5.1 σ . The source lies at a luminosity distance of 41 0-180+160 Mpc corresponding to a redshift z =0.0 9-0.04+0.03 . In the source frame, the initial black hole masses are 3 6-4+5M and 2 9-4+4M , and the final black hole mass is 6 2-4+4M , with 3. 0-0.5+0.5M c2 radiated in gravitational waves. All uncertainties define 90% credible intervals. These observations demonstrate the existence of binary stellar-mass black hole systems. This is the first direct detection of gravitational waves and the first observation of a binary black hole merger.", + "aff": [ + "LIGO, California Institute of Technology, Pasadena, California 91125, USA", + "The University of Mississippi, University, Mississippi 38677, USA", + "California State University Fullerton, Fullerton, California 92831, USA", + "LAL, Université Paris-Sud, CNRS/IN2P3, Université Paris-Saclay, Orsay, France" + ], + "alternate_bibcode": [ + "2016arXiv160203837T" + ], + "arxiv_class": [ + "gr-qc", + "astro-ph.HE" + ], + "author": [ + "Abbott, B. P.", + "Babak, S.", + "Di Fiore, L.", + "Virgo Collaboration" + ], + "bibstem": [ + "PhRvL", + "PhRvL.116" + ], + "copyright": "2016: authors", + "database": [ + "astronomy", + "physics" + ], + "doctype": "article", + "doi": [ + "10.1103/PhysRevLett.116.061102" + ], + "first_author": "Abbott, B. 
P.", + "id": "1360978", + "issue": "6", + "keyword": [ + "General Relativity and Quantum Cosmology", + "Astrophysics - High Energy Astrophysical Phenomena" + ], + "orcid_pub": [ + "-", + "-", + "-", + "-" + ], + "page": [ + "061102" + ], + "pub": "Physical Review Letters", + "pubdate": "2016-02-00", + "title": [ + "Observation of Gravitational Waves from a Binary Black Hole Merger" + ], + "volume": "116", + "year": "2016", + "orcid_user": [ + "-", + "-", + "-", + "-" + ], + "orcid_other": [ + "-", + "-", + "-", + "-" + ], + "read_count": 639, + "property": [ + "ARTICLE", + "DATA", + "EPRINT_OPENACCESS", + "ESOURCE", + "OPENACCESS", + "PUB_OPENACCESS", + "REFEREED" + ], + "citation_count": 7348, + "indexstamp": "2022-04-04T22:08:12.481Z" + } + ] + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-by-id.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-by-id.json new file mode 100644 index 0000000000..d889319ca7 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-by-id.json @@ -0,0 +1,169 @@ +{ + "status":"ok", + "message-type":"work", + "message-version":"1.0.0", + "message":{ + "indexed":{ + "date-parts":[ + [ + 2022, + 4, + 5 + ] + ], + "date-time":"2022-04-05T22:05:30Z", + "timestamp":1649196330913 + }, + "reference-count":0, + "publisher":"Petro Mohyla Black Sea National University", + "issue":"2", + "content-domain":{ + "domain":[ + + ], + "crossmark-restriction":false + }, + "short-container-title":[ + "Ukr. \u017e. med. b\u00ecol. 
sportu" + ], + "published-print":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + }, + "DOI":"10.26693\/jmbs01.02.184", + "type":"journal-article", + "created":{ + "date-parts":[ + [ + 2017, + 9, + 7 + ] + ], + "date-time":"2017-09-07T13:30:46Z", + "timestamp":1504791046000 + }, + "page":"184-187", + "source":"Crossref", + "is-referenced-by-count":0, + "title":[ + "State of Awareness of Freshers\u2019 Groups Chortkiv State Medical College of Prevention of Iodine Deficiency Diseases" + ], + "prefix":"10.26693", + "volume":"1", + "author":[ + { + "given":"L.V.", + "family":"Senyuk", + "sequence":"first", + "affiliation":[ + + ] + }, + { + "name":"Chortkiv State Medical College 7, Gogola St., Chortkiv, Ternopil region 48500, Ukraine", + "sequence":"first", + "affiliation":[ + + ] + } + ], + "member":"11225", + "published-online":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + }, + "container-title":[ + "Ukra\u00efns\u02b9kij \u017eurnal medicini, b\u00ecolog\u00ec\u00ef ta sportu" + ], + "original-title":[ + "\u0421\u0422\u0410\u041d \u041e\u0411\u0406\u0417\u041d\u0410\u041d\u041e\u0421\u0422\u0406 \u0421\u0422\u0423\u0414\u0415\u041d\u0422\u0406\u0412 \u041d\u041e\u0412\u041e\u041d\u0410\u0411\u0420\u0410\u041d\u0418\u0425 \u0413\u0420\u0423\u041f \u0427\u041e\u0420\u0422\u041a\u0406\u0412\u0421\u042c\u041a\u041e\u0413\u041e \u0414\u0415\u0420\u0416\u0410\u0412\u041d\u041e\u0413\u041e \u041c\u0415\u0414\u0418\u0427\u041d\u041e\u0413\u041e \u041a\u041e\u041b\u0415\u0414\u0416\u0423 \u0417 \u041f\u0418\u0422\u0410\u041d\u042c \u041f\u0420\u041e\u0424\u0406\u041b\u0410\u041a\u0422\u0418\u041a\u0418 \u0419\u041e\u0414\u041e\u0414\u0415\u0424\u0406\u0426\u0418\u0422\u041d\u0418\u0425 \u0417\u0410\u0425\u0412\u041e\u0420\u042e\u0412\u0410\u041d\u042c" + ], + "deposited":{ + "date-parts":[ + [ + 2017, + 9, + 8 + ] + ], + "date-time":"2017-09-08T10:14:53Z", + "timestamp":1504865693000 + }, + "score":1, + "resource":{ + "primary":{ + 
"URL":"http:\/\/en.jmbs.com.ua\/archive\/1\/2\/184" + } + }, + "subtitle":[ + + ], + "short-title":[ + + ], + "issued":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + }, + "references-count":0, + "journal-issue":{ + "issue":"2", + "published-online":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + }, + "published-print":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + } + }, + "URL":"http:\/\/dx.doi.org\/10.26693\/jmbs01.02.184", + "relation":{ + + }, + "ISSN":[ + "2415-3060" + ], + "issn-type":[ + { + "value":"2415-3060", + "type":"print" + } + ], + "published":{ + "date-parts":[ + [ + 2016, + 5, + 19 + ] + ] + } + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-test.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-test.json new file mode 100644 index 0000000000..69a9433868 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/crossRef-test.json @@ -0,0 +1,309 @@ +{ + "status": "ok", + "message-type": "work-list", + "message-version": "1.0.0", + "message": { + "facets": {}, + "total-results": 10, + "items": [ + { + "indexed": { + "date-parts": [ + [ + 2021, + 12, + 22 + ] + ], + "date-time": "2021-12-22T10:58:16Z", + "timestamp": 1640170696598 + }, + "reference-count": 0, + "publisher": "Petro Mohyla Black Sea National University", + "issue": "2", + "content-domain": { + "domain": [], + "crossmark-restriction": false + }, + "short-container-title": [ + "Ukr. ž. med. bìol. 
sportu" + ], + "published-print": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "DOI": "10.26693/jmbs01.02.184", + "type": "journal-article", + "created": { + "date-parts": [ + [ + 2017, + 9, + 7 + ] + ], + "date-time": "2017-09-07T13:30:46Z", + "timestamp": 1504791046000 + }, + "page": "184-187", + "source": "Crossref", + "is-referenced-by-count": 0, + "title": [ + "State of Awareness of Freshers’ Groups Chortkiv State Medical College of Prevention of Iodine Deficiency Diseases" + ], + "prefix": "10.26693", + "volume": "1", + "author": [ + { + "given": "L.V.", + "family": "Senyuk", + "sequence": "first", + "affiliation": [] + }, + { + "name": "Chortkiv State Medical College 7, Gogola St., Chortkiv, Ternopil region 48500, Ukraine", + "sequence": "first", + "affiliation": [] + } + ], + "member": "11225", + "published-online": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "container-title": [ + "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu" + ], + "original-title": [ + "СТАН ОБІЗНАНОСТІ СТУДЕНТІВ НОВОНАБРАНИХ ГРУП ЧОРТКІВСЬКОГО ДЕРЖАВНОГО МЕДИЧНОГО КОЛЕДЖУ З ПИТАНЬ ПРОФІЛАКТИКИ ЙОДОДЕФІЦИТНИХ ЗАХВОРЮВАНЬ" + ], + "deposited": { + "date-parts": [ + [ + 2017, + 9, + 8 + ] + ], + "date-time": "2017-09-08T10:14:53Z", + "timestamp": 1504865693000 + }, + "score": 22.728952, + "issued": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "references-count": 0, + "journal-issue": { + "issue": "2", + "published-online": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "published-print": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + } + }, + "URL": "http://dx.doi.org/10.26693/jmbs01.02.184", + "ISSN": [ + "2415-3060" + ], + "issn-type": [ + { + "value": "2415-3060", + "type": "print" + } + ], + "published": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + } + }, + { + "indexed": { + "date-parts": [ + [ + 2022, + 3, + 29 + ] + ], + "date-time": "2022-03-29T13:04:48Z", + "timestamp": 1648559088439 + }, + "reference-count": 0, + 
"publisher": "Petro Mohyla Black Sea National University", + "issue": "2", + "content-domain": { + "domain": [], + "crossmark-restriction": false + }, + "short-container-title": [ + "Ukr. ž. med. bìol. sportu" + ], + "published-print": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "DOI": "10.26693/jmbs01.02.105", + "type": "journal-article", + "created": { + "date-parts": [ + [ + 2017, + 9, + 1 + ] + ], + "date-time": "2017-09-01T10:04:04Z", + "timestamp": 1504260244000 + }, + "page": "105-108", + "source": "Crossref", + "is-referenced-by-count": 0, + "title": [ + "Ischemic Heart Disease and Role of Nurse of Cardiology Department" + ], + "prefix": "10.26693", + "volume": "1", + "author": [ + { + "given": "K. І.", + "family": "Kozak", + "sequence": "first", + "affiliation": [] + }, + { + "name": "Chortkiv State Medical College 7, Gogola St., Chortkiv, Ternopil region 48500, Ukraine", + "sequence": "first", + "affiliation": [] + } + ], + "member": "11225", + "published-online": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "container-title": [ + "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu" + ], + "original-title": [ + "ІШЕМІЧНА ХВОРОБА СЕРЦЯ ТА РОЛЬ МЕДИЧНОЇ СЕСТРИ КАРДІОЛОГІЧНОГО ВІДДІЛЕННЯ" + ], + "deposited": { + "date-parts": [ + [ + 2017, + 9, + 2 + ] + ], + "date-time": "2017-09-02T12:36:15Z", + "timestamp": 1504355775000 + }, + "score": 18.263277, + "resource": { + "primary": { + "URL": "http://en.jmbs.com.ua/archive/1/2/105" + } + }, + "issued": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "references-count": 0, + "journal-issue": { + "issue": "2", + "published-online": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + }, + "published-print": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + } + }, + "URL": "http://dx.doi.org/10.26693/jmbs01.02.105", + "ISSN": [ + "2415-3060" + ], + "issn-type": [ + { + "value": "2415-3060", + "type": "print" + } + ], + "published": { + "date-parts": [ + [ + 2016, + 5, + 19 + ] + ] + 
} + } + ], + "items-per-page": 2, + "query": { + "start-index": 0, + "search-terms": "chortkiv" + } + } +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-first.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-first.xml new file mode 100644 index 0000000000..2499552fc2 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-first.xml @@ -0,0 +1,139 @@ + + + + + + + + ES + 2902749 + T3 + 20220329 + + + ES2902749T + 20220329 + + + + + G01N 33/ 574 A I + + + + + +
G
+ 01 + N + 33 + 57415 + I + US +
+ + +
G
+ 01 + N + 33 + 57415 + I + EP +
+ + +
G
+ 01 + N + 33 + 57484 + I + US +
+ + +
G
+ 01 + N + 2800 + 54 + A + EP +
+
+ + + ES + 18705153 + T + + + ES20180705153T + 20180219 + + + 18705153 + + + + + + EP20170157020 + 20170220 + + + 17157020 + + + + + WO2018EP54052 + 20180219 + + + PCT/EP2018/054052 + + + + + + + + PANKA BLOOD TEST GMBH + + + + + Panka Blood Test GmbH + + + + + + + PANTEL KLAUS + + + + + BARTKOWIAK KAI + + + + + PANTEL, Klaus, + + + + + BARTKOWIAK, Kai + + + + + Método para el diagnóstico del cáncer de mama +
+
+
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-resp.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-resp.xml new file mode 100644 index 0000000000..ab3a1ad102 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-resp.xml @@ -0,0 +1,24 @@ + + + + + txt = test + + + + + ES + 2902749 + T3 + + + + + TW + 202202864 + A + + + + + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-second.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-second.xml new file mode 100644 index 0000000000..c68724a527 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-second.xml @@ -0,0 +1,400 @@ + + + + + + + + + TW + 202202864 + A + 20220116 + + + TW202202864 + 20220116 + + + + + G01R 31/ 319 A I + + + G01R 31/ 3193 A I + + + + + +
G
+ 01 + R + 31 + 31712 + I + US +
+ + +
G
+ 01 + R + 31 + 31712 + I + KR +
+ + +
G
+ 01 + R + 31 + 31713 + I + US +
+ + +
G
+ 01 + R + 31 + 31713 + I + KR +
+ + +
G
+ 01 + R + 31 + 31724 + I + US +
+ + +
G
+ 01 + R + 31 + 31724 + I + KR +
+ + +
G
+ 01 + R + 31 + 3177 + I + US +
+ + +
G
+ 01 + R + 31 + 3177 + I + KR +
+ + +
G
+ 01 + R + 31 + 318307 + I + US +
+ + +
G
+ 01 + R + 31 + 318307 + I + KR +
+ + +
G
+ 01 + R + 31 + 318533 + I + KR +
+ + +
G
+ 01 + R + 31 + 318533 + I + EP +
+ + +
G
+ 01 + R + 31 + 31905 + I + US +
+ + +
G
+ 01 + R + 31 + 31905 + I + KR +
+ + +
G
+ 01 + R + 31 + 31907 + I + KR +
+ + +
G
+ 01 + R + 31 + 31907 + I + US +
+ + +
G
+ 01 + R + 31 + 31907 + I + EP +
+ + +
G
+ 01 + R + 31 + 31908 + I + US +
+ + +
G
+ 01 + R + 31 + 31908 + I + KR +
+ + +
G
+ 01 + R + 31 + 31919 + I + KR +
+ + +
G
+ 01 + R + 31 + 31919 + I + EP +
+ + +
G
+ 01 + R + 31 + 31926 + I + US +
+ + +
G
+ 01 + R + 31 + 31926 + I + KR +
+ + +
G
+ 01 + R + 31 + 31935 + I + KR +
+ + +
G
+ 01 + R + 31 + 31935 + I + EP +
+ + +
G
+ 06 + F + 11 + 2733 + I + US +
+ + +
G
+ 06 + F + 11 + 2733 + I + KR +
+ + +
G
+ 06 + F + 13 + 20 + I + US +
+ + +
G
+ 06 + F + 13 + 20 + I + KR +
+
+ + + TW + 109122801 + A + + + TW20200122801 + 20200706 + + + 109122801 + + + + + + US201962795456P + 20190122 + + + + + WO2020EP51540 + 20200122 + + + 2020EP51540 + + + + + + + + ADVANTEST CORP [JP] + + + + + ADVANTEST CORPORATION + + + + + + + POEPPE OLAF [DE] + + + + + HILLIGES KLAUS-DIETER [DE] + + + + + KRECH ALAN [US] + + + + + POEPPE, OLAF, + + + + + HILLIGES, KLAUS-DIETER, + + + + + KRECH, ALAN + + + + + Automated test equipment for testing one or more devices under test, method for automated testing of one or more devices under test, and computer program using a buffer memory +
+
+
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-token.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-token.json new file mode 100644 index 0000000000..466a8ebeda --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/epo-token.json @@ -0,0 +1,18 @@ +{ + "refresh_token_expires_in": "0", + "api_product_list": "[ops-prod]", + "api_product_list_json": [ + "ops-prod" + ], + "organization_name": "epo", + "developer.email": "4science.it@gmail.com", + "token_type": "BearerToken", + "issued_at": "1649175500838", + "client_id": "7TzQLOPAbpDS5xwYhPwj22gbgNw0xHp2", + "access_token": "ek8VKPuuSwHmfGrjPM79iWVHcUea", + "application_name": "5aad89cd-a53b-417c-a585-7a7aa060c92e", + "scope": "core", + "expires_in": "1199", + "refresh_count": "0", + "status": "approved" +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-single-record.txt b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-single-record.txt new file mode 100644 index 0000000000..bd9934d2bc --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-single-record.txt @@ -0,0 +1,24 @@ +TY - JOUR +AU - Torres Marzo, Ricardo +TI - Requena Jiménez, Miguel, Los espacios de la muerte en Roma, Madrid, Síntesis, 2021, 365 págs. más bibliografía en línea, ISBN 978-84-135759-6-4. 
+JO - Nova tellus +J2 - Nova tellus +SN - 0185-3058 +VL - 39 +IS - 2 +DO - 10.19130/iifl.nt.2021.39.2.901 +DB - SciELO México +DP - http://www.scielo.org/ +ID - S0185-30582021000200231-mex +LA - es +SP - 231 +EP - 236 +DA - 2021-12 +PY - 2021 +UR - http://www.scielo.org.mx/scielo.php?script=sci_arttext&pid=S0185-30582021000200231&lang=pt +KW - Roma +KW - Historia +KW - ritos funerarios +KW - inframundo +KW - epitafios +ER - \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-test.txt b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-test.txt new file mode 100644 index 0000000000..4cc9d3ad36 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/scielo-test.txt @@ -0,0 +1,51 @@ +TY - JOUR +AU - Torres Marzo, Ricardo +TI - Requena Jiménez, Miguel, Los espacios de la muerte en Roma, Madrid, Síntesis, 2021, 365 págs. más bibliografía en línea, ISBN 978-84-135759-6-4. +JO - Nova tellus +J2 - Nova tellus +SN - 0185-3058 +VL - 39 +IS - 2 +DO - 10.19130/iifl.nt.2021.39.2.901 +DB - SciELO México +DP - http://www.scielo.org/ +ID - S0185-30582021000200231-mex +LA - es +SP - 231 +EP - 236 +DA - 2021-12 +PY - 2021 +UR - http://www.scielo.org.mx/scielo.php?script=sci_arttext&pid=S0185-30582021000200231&lang=pt +KW - Roma +KW - Historia +KW - ritos funerarios +KW - inframundo +KW - epitafios +ER - + +TY - JOUR +AU - MAGRI, GEO +TI - Rinegoziazione e revisione del contratto. Tribunale di Roma, Sez. VI, 27 agosto 2020 +JO - Revista de Derecho Privado +J2 - Rev. Derecho Privado +SN - 0123-4366 +VL - +IS - 41 +DO - 10.18601/01234366.n41.14 +DB - SciELO Colômbia +DP - http://www.scielo.org/ +ID - S0123-43662021000200397-col +LA - it +SP - 397 +EP - 418 +DA - 2021-12 +PY - 2021 +AB - ABSTRACT: The Tribunal of Rome imposes an obligation to renegotiate long-term contracts, the balance of which has been modified by the covro pandemic. 
The decision establishes a general obligation for the parties to execute the contract in good faith and gives the judge the possibility of a judicial review. This is a long-awaited decision in doctrine which complies with the indications of the Supreme Court of Cassation expressed in its memorandum 56/2020. +UR - http://www.scielo.org.co/scielo.php?script=sci_arttext&pid=S0123-43662021000200397&lang=pt +L1 - http://www.scielo.org.co/pdf/rdp/n41/0123-4366-rdp-41-397.pdf +KW - sopravvenienza contrattuale +KW - covro +KW - buona fede in senso oggettivo +KW - obbligo di rinegoziare +KW - revisione del contratto +ER - \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-by-id.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-by-id.json new file mode 100644 index 0000000000..c992ea5f57 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-by-id.json @@ -0,0 +1,44 @@ +{ + "resultCount": 1, + "records": [ + { + "authors": { + "primary": [], + "secondary": { + "Carettoni, Gianfilippo.": [] + }, + "corporate": [] + }, + "formats": [ + "Map", + "Map", + "Book" + ], + "id": "653510", + "languages": [ + "Italian" + ], + "series": [], + "subjects": [ + [ + "Rome (Italy)", + "Maps", + "Early works to 1800." + ], + [ + "Rome (Italy)", + "Antiquities", + "Maps." 
+ ] + ], + "title": "La pianta marmorea di Roma antica: Forma urbis Romae /", + "urls": [ + { + "url": "http://hdl.handle.net/20.500.12390/231", + "desc": "http://hdl.handle.net/20.500.12390/231" + } + ] + } + ], + "status": "OK" +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-generic.json b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-generic.json new file mode 100644 index 0000000000..889ec89016 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/vuFind-generic.json @@ -0,0 +1,82 @@ +{ + "resultCount": 1994, + "records": [ + { + "authors": { + "primary": [], + "secondary": { + "Carettoni, Gianfilippo.": [] + }, + "corporate": [] + }, + "formats": [ + "Map", + "Map", + "Book" + ], + "id": "653510", + "languages": [ + "Italian" + ], + "series": [], + "subjects": [ + [ + "Rome (Italy)", + "Maps", + "Early works to 1800." + ], + [ + "Rome (Italy)", + "Antiquities", + "Maps." + ] + ], + "title": "La pianta marmorea di Roma antica: Forma urbis Romae /", + "urls": [ + { + "url": "http://hdl.handle.net/20.500.12390/231", + "desc": "http://hdl.handle.net/20.500.12390/231" + } + ] + }, + { + "authors": { + "primary": { + "Surdu, Mihai.": [] + }, + "secondary": [], + "corporate": [] + }, + "formats": [ + "Online", + "Book" + ], + "id": "1665326", + "languages": [ + "English" + ], + "series": [], + "subjects": [ + [ + "Public opinion", + "Europe." + ], + [ + "Stereotypes (Social psychology)" + ], + [ + "Romanies", + "Public opinion." 
+ ] + ], + "title": "Expert frames : scientific and policy practices of Roma classification /", + "urls": [ + { + "url": "http://ezproxy.villanova.edu/login?URL=http://www.jstor.org/stable/10.7829/j.ctt1ggjj08", + "desc": "http://ezproxy.villanova.edu/login?URL=http://www.jstor.org/stable/10.7829/j.ctt1ggjj08" + } + ] + } + ], + "status": "OK" +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/test-config.properties b/dspace-server-webapp/src/test/resources/test-config.properties index 3af96b20fc..37db1c9f9c 100644 --- a/dspace-server-webapp/src/test/resources/test-config.properties +++ b/dspace-server-webapp/src/test/resources/test-config.properties @@ -13,4 +13,4 @@ test.folder.assetstore = ./target/testing/dspace/assetstore test.bitstream = ./target/testing/dspace/assetstore/ConstitutionofIreland.pdf #Path for a test Taskfile for the curate script -test.curateTaskFile = ./target/testing/dspace/assetstore/curate.txt +test.curateTaskFile = ./target/testing/dspace/assetstore/curate.txt \ No newline at end of file diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 893fbfc890..d24cd42b43 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -68,20 +68,22 @@ solr.multicorePrefix = ##### Database settings ##### # DSpace only supports two database types: PostgreSQL or Oracle +# PostgreSQL is highly recommended. +# Oracle support is DEPRECATED. 
See https://github.com/DSpace/DSpace/issues/8214 # URL for connecting to database # * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe +# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace # JDBC Driver # * For Postgres: org.postgresql.Driver -# * For Oracle: oracle.jdbc.OracleDriver +# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver db.driver = org.postgresql.Driver # Database Dialect (for Hibernate) # * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle: org.hibernate.dialect.Oracle10gDialect +# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -90,7 +92,7 @@ db.password = dspace # Database Schema name # * For Postgres, this is often "public" (default schema) -# * For Oracle, schema is equivalent to the username of your database account, +# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, # so this may be set to ${db.username} in most scenarios. db.schema = public @@ -806,6 +808,22 @@ plugin.single.org.dspace.embargo.EmbargoSetter = org.dspace.embargo.DefaultEmbar # implementation of embargo lifter plugin - replace with local implementation if applicable plugin.single.org.dspace.embargo.EmbargoLifter = org.dspace.embargo.DefaultEmbargoLifter +# values for the forever embargo date threshold +# This threshold date is used in the default access status helper to determine if an item is +# restricted or embargoed based on the start date of the primary (or first) file policies. +# In this case, if the policy start date is earlier than the threshold date, the status will +# be embargo, else it will be restricted. +# You might want to change this threshold based on your needs. For example: some databases +# don't accept a date later than 31 December 9999. 
+access.status.embargo.forever.year = 10000 +access.status.embargo.forever.month = 1 +access.status.embargo.forever.day = 1 + +# implementation of access status helper plugin - replace with local implementation if applicable +# This default access status helper provides an item status based on the policies of the primary +# bitstream (or first bitstream in the original bundles if no primary file is specified). +plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper + #### Checksum Checker Settings #### # Default dispatcher in case none specified plugin.single.org.dspace.checker.BitstreamDispatcher=org.dspace.checker.SimpleDispatcher @@ -912,6 +930,7 @@ webui.licence_bundle.show = false # since that usually contains email addresses which ought to be kept # private and is mainly of interest to administrators: metadata.hide.dc.description.provenance = true +metadata.hide.person.email = true ##### Settings for Submission Process ##### @@ -920,6 +939,11 @@ metadata.hide.dc.description.provenance = true # Defaults to true; If set to 'false', submitter has option to skip upload #webui.submit.upload.required = true +# Which field should be used for type-bind +# Defaults to 'dc.type'; If changing this value, you must also update the related +# dspace-angular environment configuration property submission.typeBind.field +#submit.type-bind.field = dc.type + #### Creative Commons settings ###### # The url to the web service API @@ -1207,20 +1231,6 @@ plugin.named.org.dspace.content.license.LicenseArgumentFormatter = \ #### Syndication Feed (RSS) Settings ###### # TODO: UNSUPPORTED in DSpace 7.0. 
Will be added in a later release -# enable syndication feeds - links display on community and collection home pages -webui.feed.enable = true -# number of DSpace items per feed (the most recent submissions) -webui.feed.items = 4 -# maximum number of feeds in memory cache -# value of 0 will disable caching -webui.feed.cache.size = 100 -# number of hours to keep cached feeds before checking currency -# value of 0 will force a check with each request -webui.feed.cache.age = 48 -# which syndication formats to offer -# use one or more (comma-separated) values from list: -# rss_0.90, rss_0.91, rss_0.92, rss_0.93, rss_0.94, rss_1.0, rss_2.0 -webui.feed.formats = rss_1.0,rss_2.0,atom_1.0 # URLs returned by the feed will point at the global handle server # (e.g. https://hdl.handle.net/123456789/1). Set to true to use local server # URLs (i.e. https://myserver.myorg/handle/123456789/1) @@ -1290,7 +1300,9 @@ webui.feed.item.author = dc.contributor.author # NB: for result data formatting, OpenSearch uses Syndication Feed Settings # so even if Syndication Feeds are not enabled, they must be configured # enable open search -websvc.opensearch.enable = false +websvc.opensearch.enable = true +# url used in service document +websvc.opensearch.svccontext = opensearch # context for html request URLs - change only for non-standard servlet mapping websvc.opensearch.uicontext = simple-search # present autodiscovery link in every page head @@ -1379,17 +1391,6 @@ sherpa.romeo.url = https://v2.sherpa.ac.uk/cgi/retrieve # register for a new API key sherpa.romeo.apikey = -##### Authority Control Settings ##### -#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ -# org.dspace.content.authority.SampleAuthority = Sample, \ -# org.dspace.content.authority.SHERPARoMEOPublisher = SRPublisher, \ -# org.dspace.content.authority.SHERPARoMEOJournalTitle = SRJournalTitle, \ -# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority - -#Uncomment to enable ORCID authority control 
-#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ -# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority - # URL of ORCID API # Defaults to using the Public API V3 (pub.orcid.org) orcid.api.url = https://pub.orcid.org/v3.0 @@ -1404,69 +1405,6 @@ orcid.clientsecret = #ORCID JWT Endpoint orcid.oauth.url = https://orcid.org/oauth/token -## The DCInputAuthority plugin is automatically configured with every -## value-pairs element in input-forms.xml, namely: -## common_identifiers, common_types, common_iso_languages -## -## The DSpaceControlledVocabulary plugin is automatically configured -## with every *.xml file in [dspace]/config/controlled-vocabularies, -## and creates a plugin instance for each, using base filename as the name. -## eg: nsi, srsc. -## Each DSpaceControlledVocabulary plugin comes with three configuration options: -# vocabulary.plugin._plugin_.hierarchy.store = # default: true -# vocabulary.plugin._plugin_.hierarchy.suggest = # default: false -# vocabulary.plugin._plugin_.delimiter = "" # default: "::" -## -## An example using "srsc" can be found later in this section - -plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \ - org.dspace.content.authority.DCInputAuthority, \ - org.dspace.content.authority.DSpaceControlledVocabulary - -## -## This sets the default lowest confidence level at which a metadata value is included -## in an authority-controlled browse (and search) index. It is a symbolic -## keyword, one of the following values (listed in descending order): -## accepted -## uncertain -## ambiguous -## notfound -## failed -## rejected -## novalue -## unset -## See manual or org.dspace.content.authority.Choices source for descriptions. -authority.minconfidence = ambiguous - -# Configuration settings for ORCID based authority control. 
-# Uncomment the lines below to enable configuration -#choices.plugin.dc.contributor.author = SolrAuthorAuthority -#choices.presentation.dc.contributor.author = authorLookup -#authority.controlled.dc.contributor.author = true -#authority.author.indexer.field.1=dc.contributor.author - -## -## This sets the lowest confidence level at which a metadata value is included -## in an authority-controlled browse (and search) index. It is a symbolic -## keyword from the same set as for the default "authority.minconfidence" -#authority.minconfidence.dc.contributor.author = accepted - -## demo: subject code autocomplete, using srsc as authority -## (DSpaceControlledVocabulary plugin must be enabled) -## Warning: when enabling this feature any controlled vocabulary configuration in the input-forms.xml for the metadata field will be overridden. -#vocabulary.plugin.srsc.hierarchy.store = true -#vocabulary.plugin.srsc.hierarchy.suggest = true -#vocabulary.plugin.srsc.delimiter = "::" - -# publisher name lookup through SHERPA/RoMEO: -#choices.plugin.dc.publisher = SRPublisher -#choices.presentation.dc.publisher = suggest - -## demo: journal title lookup, with ISSN as authority -#choices.plugin.dc.title.alternative = SRJournalTitle -#choices.presentation.dc.title.alternative = suggest -#authority.controlled.dc.title.alternative = true - ##### Google Scholar Metadata Configuration ##### google-metadata.config = ${dspace.dir}/config/crosswalks/google-metadata.properties google-metadata.enable = true @@ -1572,6 +1510,7 @@ log.report.dir = ${dspace.dir}/log # The max number of events held in the GA buffer (default: 256) # google.analytics.buffer.limit=256 +#################################################################### #---------------------------------------------------------------# #----------------REQUEST ITEM CONFIGURATION---------------------# #---------------------------------------------------------------# @@ -1584,6 +1523,11 @@ request.item.type = all # Should all Request 
Copy emails go to the helpdesk instead of the item submitter? request.item.helpdesk.override = false +#------------------------------------------------------------------# +#------------------SUBMISSION CONFIGURATION------------------------# +#------------------------------------------------------------------# +# Field to use for type binding, default dc.type +submit.type-bind.field = dc.type #------------------------------------------------------------------# #-------------------MODULE CONFIGURATIONS--------------------------# @@ -1608,6 +1552,7 @@ module_dir = modules # PRIOR to those below (and therefore may override configs in these default # module configuration files). +include = ${module_dir}/actuator.cfg include = ${module_dir}/altmetrics.cfg include = ${module_dir}/authentication.cfg include = ${module_dir}/authentication-ip.cfg @@ -1616,6 +1561,7 @@ include = ${module_dir}/authentication-oidc.cfg include = ${module_dir}/authentication-password.cfg include = ${module_dir}/authentication-shibboleth.cfg include = ${module_dir}/authentication-x509.cfg +include = ${module_dir}/authority.cfg include = ${module_dir}/bulkedit.cfg include = ${module_dir}/citation-page.cfg include = ${module_dir}/clamav.cfg @@ -1632,6 +1578,7 @@ include = ${module_dir}/rest.cfg include = ${module_dir}/iiif.cfg include = ${module_dir}/solr-statistics.cfg include = ${module_dir}/solrauthority.cfg +include = ${module_dir}/researcher-profile.cfg include = ${module_dir}/spring.cfg include = ${module_dir}/submission-curation.cfg include = ${module_dir}/sword-client.cfg @@ -1641,3 +1588,4 @@ include = ${module_dir}/translator.cfg include = ${module_dir}/usage-statistics.cfg include = ${module_dir}/versioning.cfg include = ${module_dir}/workflow.cfg +include = ${module_dir}/external-providers.cfg diff --git a/dspace-server-webapp/src/main/resources/iiif/cache/ehcache.xml b/dspace/config/ehcache.xml similarity index 59% rename from 
dspace-server-webapp/src/main/resources/iiif/cache/ehcache.xml rename to dspace/config/ehcache.xml index cc9ad164f7..41508a5fa8 100644 --- a/dspace-server-webapp/src/main/resources/iiif/cache/ehcache.xml +++ b/dspace/config/ehcache.xml @@ -1,22 +1,19 @@ - + xsi:schemaLocation="http://www.ehcache.org/v3 + http://www.ehcache.org/schema/ehcache-core-3.7.xsd"> + - org.dspace.app.rest.cache.CacheLogger + org.dspace.iiif.logger.CacheLogger ASYNCHRONOUS UNORDERED CREATED @@ -33,7 +30,7 @@ - org.dspace.app.rest.cache.CanvasCacheLogger + org.dspace.iiif.logger.CanvasCacheLogger ASYNCHRONOUS UNORDERED CREATED @@ -47,6 +44,26 @@ 4 + + + + org.dspace.app.sherpa.cache.SherpaCacheLogger + ASYNCHRONOUS + UNORDERED + CREATED + EXPIRED + REMOVED + EVICTED + + + + 3000 + 4 + + + - + + + \ No newline at end of file diff --git a/dspace/config/emails/request_item.author b/dspace/config/emails/request_item.author index 9166d31d78..ac79270e7f 100644 --- a/dspace/config/emails/request_item.author +++ b/dspace/config/emails/request_item.author @@ -1,3 +1,16 @@ +## E-mail sent to a restricted Item's author when a user requests a copy. +## +## Parameters: 0 requester's name +## 1 requester's address +## 2 name of a single bitstream, or "all" +## 3 item Handle +## 4 item title +## 5 message from requester +## 6 link back to DSpace for action +## 7 corresponding author name +## 8 corresponding author email +## 9 configuration property "dspace.name" +## 10 configuration property "mail.helpdesk" #set($subject = 'Request copy of document') Dear ${params[7]}, diff --git a/dspace/config/emails/request_item.to_admin b/dspace/config/emails/request_item.to_admin deleted file mode 100644 index 244fa5647b..0000000000 --- a/dspace/config/emails/request_item.to_admin +++ /dev/null @@ -1,13 +0,0 @@ -Subject: Request copy of document - -Dear Administrator, - -A user of {7}, named {0} and using the email {1}, requested a copy of the file(s) associated with the document: "{4}" ({3}). 
- -This request came along with the following message: - -"{5}" - -To answer, click {6}. - -PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). diff --git a/dspace/config/emails/suggest b/dspace/config/emails/suggest deleted file mode 100644 index 2f90b69ce1..0000000000 --- a/dspace/config/emails/suggest +++ /dev/null @@ -1,26 +0,0 @@ -## E-mail sent with the information filled out in a suggest form. -## -## Parameters: {0} recipient name -## {1} sender name -## {2} repository name -## {3} item title -## {4} item handle URI -## {5} item local URL - may be used in lieu of {4} if not using handle server -## {6} collection name -## {7} sender message -## See org.dspace.core.Email for information on the format of this file. -## -#set($subject = 'An item of interest from DSpace') - -Hello ${params[0]}: - -${params[1]} requested we send you this email regarding an item available in ${params[2]}. - -Title: ${params[3]} -Location: ${params[5]} -In Collection: ${params[6]} -Personal Message: ${params[7]} - -The DSpace digital repository system captures, stores, indexes, preserves, and distributes digital material. -For more information, visit www.dspace.org - diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 702af8f865..299a5fdfce 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -203,6 +203,12 @@ org.dspace.submit.step.SampleStep sample + + + submit.progressbar.sherpapolicy + org.dspace.app.rest.submit.step.SherpaPolicyStep + sherpaPolicy + @@ -235,8 +241,12 @@ + + + + diff --git a/dspace/config/local.cfg.EXAMPLE b/dspace/config/local.cfg.EXAMPLE index 67c03808c4..5d9398d047 100644 --- a/dspace/config/local.cfg.EXAMPLE +++ b/dspace/config/local.cfg.EXAMPLE @@ -68,20 +68,22 @@ dspace.name = DSpace at My University # DATABASE CONFIGURATION # ########################## # DSpace only supports two database types: PostgreSQL or Oracle +# PostgreSQL is highly recommended. +# Oracle support is DEPRECATED. 
See https://github.com/DSpace/DSpace/issues/8214 # URL for connecting to database # * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe +# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace # JDBC Driver # * For Postgres: org.postgresql.Driver -# * For Oracle: oracle.jdbc.OracleDriver +# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver db.driver = org.postgresql.Driver # Database Dialect (for Hibernate) # * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle: org.hibernate.dialect.Oracle10gDialect +# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -90,7 +92,7 @@ db.password = dspace # Database Schema name # * For Postgres, this is often "public" (default schema) -# * For Oracle, schema is equivalent to the username of your database account, +# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, # so this may be set to ${db.username} in most scenarios. 
db.schema = public diff --git a/dspace/config/modules/actuator.cfg b/dspace/config/modules/actuator.cfg new file mode 100644 index 0000000000..a14a3f1cac --- /dev/null +++ b/dspace/config/modules/actuator.cfg @@ -0,0 +1,58 @@ + +#---------------------------------------------------------------# +#------------SPRING BOOT ACTUATOR CONFIGURATION-----------------# +#---------------------------------------------------------------# + +# Health endpoint configuration, for more details see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints.health + +## Configuration to establish when to show the health status details +management.endpoint.health.show-details = when-authorized +## Configuration of which users can see the health status details +management.endpoint.health.roles = ADMIN +## Configuration to establish the order of the health statuses +management.endpoint.health.status.order= down, out-of-service, up-with-issues, up, unknown +## Configuration that enables only health and info endpoints +management.endpoints.web.exposure.include=health,info + +## Configuration to set 200 as status of health http response when it is DOWN or OUT_OF_SERVICE +## The DSpace UI requires these be set to 200 in order to support health status reports when services are down.
+management.endpoint.health.status.http-mapping.down = 200 +management.endpoint.health.status.http-mapping.out-of-service = 200 + +management.health.ping.enabled = false +management.health.diskSpace.enabled = false + +# CORS configuration for all actuators +management.endpoints.web.cors.allowed-origins = ${rest.cors.allowed-origins} +management.endpoints.web.cors.allowed-methods = * +management.endpoints.web.cors.allowed-headers = Accept, Authorization, Content-Type, Origin, X-On-Behalf-Of, X-Requested-With, X-XSRF-TOKEN, X-CORRELATION-ID, X-REFERRER +management.endpoints.web.cors.exposed-headers = Authorization, DSPACE-XSRF-TOKEN, Location, WWW-Authenticate +management.endpoints.web.cors.allow-credentials = true + +#---------------------------------------------------------------# +#------------------------INFO ENDPOINT--------------------------# +#---------------------------------------------------------------# + +# All properties under the info key will be automatically exposed by the info actuator with a json structure. Furthermore, it is possible to +# enrich the content of the response given by the info actuator by defining in the Spring context beans of classes that implements InfoContributor. 
+ +management.info.env.enabled = true +management.info.java.enabled = true + +info.app.name = ${dspace.name} +info.app.version = ${dspace.version} +info.app.dir = ${dspace.dir} +info.app.url = ${dspace.server.url} +info.app.db = ${db.url} +info.app.solr.server = ${solr.server} +info.app.solr.prefix = ${solr.multicorePrefix} +info.app.mail.server = ${mail.server} +info.app.mail.from-address = ${mail.from.address} +info.app.mail.feedback-recipient = ${feedback.recipient} +info.app.mail.mail-admin = ${mail.admin} +info.app.mail.mail-helpdesk = ${mail.helpdesk} +info.app.mail.alert-recipient = ${alert.recipient} + +info.app.cors.allowed-origins = ${rest.cors.allowed-origins} + +info.app.ui.url = ${dspace.ui.url} diff --git a/dspace/config/modules/authority.cfg b/dspace/config/modules/authority.cfg new file mode 100644 index 0000000000..c7a0909e76 --- /dev/null +++ b/dspace/config/modules/authority.cfg @@ -0,0 +1,88 @@ +#---------------------------------------------------------------# +#----------------- AUTHORITY CONFIGURATIONS --------------------# +#---------------------------------------------------------------# +# These configs are used by the authority framework # +#---------------------------------------------------------------# + +## The DCInputAuthority plugin is automatically configured with every +## value-pairs element in input-forms.xml, namely: +## common_identifiers, common_types, common_iso_languages +## +## The DSpaceControlledVocabulary plugin is automatically configured +## with every *.xml file in [dspace]/config/controlled-vocabularies, +## and creates a plugin instance for each, using base filename as the name. +## eg: nsi, srsc. 
+## Each DSpaceControlledVocabulary plugin comes with three configuration options: +# vocabulary.plugin._plugin_.hierarchy.store = # default: true +# vocabulary.plugin._plugin_.hierarchy.suggest = # default: false +# vocabulary.plugin._plugin_.delimiter = "" # default: "::" +## +## An example using "srsc" can be found later in this section + +plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \ + org.dspace.content.authority.DCInputAuthority, \ + org.dspace.content.authority.DSpaceControlledVocabulary + + ## +## This sets the default lowest confidence level at which a metadata value is included +## in an authority-controlled browse (and search) index. It is a symbolic +## keyword, one of the following values (listed in descending order): +## accepted +## uncertain +## ambiguous +## notfound +## failed +## rejected +## novalue +## unset +## See manual or org.dspace.content.authority.Choices source for descriptions. +authority.minconfidence = ambiguous + +# Configuration settings for ORCID based authority control. +# Uncomment the lines below to enable configuration +#choices.plugin.dc.contributor.author = SolrAuthorAuthority +#choices.presentation.dc.contributor.author = authorLookup +#authority.controlled.dc.contributor.author = true +#authority.author.indexer.field.1=dc.contributor.author + +## +## This sets the lowest confidence level at which a metadata value is included +## in an authority-controlled browse (and search) index. It is a symbolic +## keyword from the same set as for the default "authority.minconfidence" +#authority.minconfidence.dc.contributor.author = accepted + +## demo: subject code autocomplete, using srsc as authority +## (DSpaceControlledVocabulary plugin must be enabled) +## Warning: when enabling this feature any controlled vocabulary configuration in the input-forms.xml for the metadata field will be overridden. 
+#vocabulary.plugin.srsc.hierarchy.store = true +#vocabulary.plugin.srsc.hierarchy.suggest = true +#vocabulary.plugin.srsc.delimiter = "::" + +# publisher name lookup through SHERPA/RoMEO: +#choices.plugin.dc.publisher = SRPublisher +#choices.presentation.dc.publisher = suggest + +## demo: journal title lookup, with ISSN as authority +#choices.plugin.dc.title.alternative = SRJournalTitle +#choices.presentation.dc.title.alternative = suggest +#authority.controlled.dc.title.alternative = true + +##### Authority Control Settings ##### +#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ +# org.dspace.content.authority.SampleAuthority = Sample, \ +# org.dspace.content.authority.SHERPARoMEOPublisher = SRPublisher, \ +# org.dspace.content.authority.SHERPARoMEOJournalTitle = SRJournalTitle, \ +# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority + +#Uncomment to enable ORCID authority control +#plugin.named.org.dspace.content.authority.ChoiceAuthority = \ +# org.dspace.content.authority.SolrAuthority = SolrAuthorAuthority + +plugin.named.org.dspace.content.authority.ChoiceAuthority = \ +org.dspace.content.authority.EPersonAuthority = EPersonAuthority + +# Configuration settings required for Researcher Profiles +# These settings ensure "dspace.object.owner" field are indexed by Authority Control +#choices.plugin.dspace.object.owner = EPersonAuthority +#choices.presentation.dspace.object.owner = suggest +#authority.controlled.dspace.object.owner = true \ No newline at end of file diff --git a/dspace/config/modules/external-providers.cfg b/dspace/config/modules/external-providers.cfg new file mode 100644 index 0000000000..ced5ef81c0 --- /dev/null +++ b/dspace/config/modules/external-providers.cfg @@ -0,0 +1,45 @@ +#---------------------------------------------------------------# +#------------- EXTERNAL PROVIDER CONFIGURATIONS ----------------# +#---------------------------------------------------------------# +# Configuration properties used solely 
by external providers # +# as Scopus, Pubmed, CiNii and etc. # +#---------------------------------------------------------------# + + +################################################################# +#---------------------- CrossRef ---------------------------# +#---------------------------------------------------------------# +crossref.url = https://api.crossref.org/works +################################################################# +#---------------------- VuFind -----------------------------# +#---------------------------------------------------------------# +vufind.url = https://vufind.org/advanced_demo/api/v1/record +vufind.url.search = https://vufind.org/advanced_demo/api/v1/search +################################################################# +#---------------------- Scielo -----------------------------# +#---------------------------------------------------------------# +scielo.url = https://search.scielo.org/?output=ris&q= +################################################################# +#---------- NASA Astrophysics Data System (ADS) -------------# +#---------------------------------------------------------------# +# +# follow link to get apikey: https://ui.adsabs.harvard.edu/help/api/ +ads.key = +ads.url = https://api.adsabs.harvard.edu/v1/search/query +# comma delimited set of fields to return; default is 'id' +ads.resultFieldList = abstract,ack,aff,alternate_bibcode,alternate_title,arxiv_class,author,bibcode,bibgroup,bibstem,citation_count,copyright,database,doi,doctype,first_author,grant,id,indexstamp,issue,keyword,lang,orcid_pub,orcid_user,orcid_other,page,property,pub,pubdate,read_count,title,vizier,volume,year +################################################################# +#--------- European Patent Office (EPO) --------------------# +#---------------------------------------------------------------# +# +# follow link to get apikey: https://developers.epo.org/ +epo.consumerKey = +epo.consumerSecretKey = + +# this URL will be used
during authentication to get access token +epo.authUrl = https://ops.epo.org/3.2/auth/accesstoken +# this URL is used to perform a specific query by epo document id & epo document type +epo.url = https://ops.epo.org/rest-services/published-data/publication/$(doctype)/$(id)/biblio +# this URL will be used to perform basic searching +epo.searchUrl = https://ops.epo.org/rest-services/published-data/search +################################################################# \ No newline at end of file diff --git a/dspace/config/modules/researcher-profile.cfg b/dspace/config/modules/researcher-profile.cfg new file mode 100644 index 0000000000..afd6606a97 --- /dev/null +++ b/dspace/config/modules/researcher-profile.cfg @@ -0,0 +1,18 @@ +#---------------------------------------------------------------# +#------------------- PROFILE CONFIGURATIONS --------------------# +#---------------------------------------------------------------# + +# The Entity Type to use for the Researcher Profile. Defaults to "Person" as this is the recommended Entity Type to use +# (Requires reboot of servlet container, e.g. Tomcat, to reload) +#researcher-profile.entity-type = Person + +# The UUID of the default Collection where newly created Entities will be stored. If unspecified, the first Collection which supports "entity-type" will be used. +#researcher-profile.collection.uuid = + +# Whether or not to delete the Entity (Item) when a Profile is deleted. Default value is "false" which means that when a user deletes their profile, +# the Entity remains (retaining its data and relationships). When set to "true", the Entity (and its relationships) will be deleted if a user deletes their Profile. +researcher-profile.hard-delete.enabled = false + +# Whether a newly created profile should be visible by default. Default value is "false" which means a newly created profile is not readable to +# anonymous users. Setting to "true" means a newly created profile is immediately readable to anonymous users.
+researcher-profile.set-new-profile-visible = false \ No newline at end of file diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 1b0895810b..d3abd71145 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -32,7 +32,11 @@ rest.projection.specificLevel.maxEmbed = 5 # (Requires reboot of servlet container, e.g. Tomcat, to reload) rest.properties.exposed = plugin.named.org.dspace.curate.CurationTask rest.properties.exposed = google.analytics.key +rest.properties.exposed = websvc.opensearch.enable rest.properties.exposed = versioning.item.history.include.submitter +rest.properties.exposed = researcher-profile.entity-type +rest.properties.exposed = websvc.opensearch.svccontext +rest.properties.exposed = submit.type-bind.field #---------------------------------------------------------------# # These configs are used by the deprecated REST (v4-6) module # diff --git a/dspace/config/registries/dspace-types.xml b/dspace/config/registries/dspace-types.xml index ab0c1bc40f..5a8ddff640 100644 --- a/dspace/config/registries/dspace-types.xml +++ b/dspace/config/registries/dspace-types.xml @@ -43,4 +43,12 @@ enabled Stores a boolean text value (true or false) to indicate if the iiif feature is enabled or not for the dspace object. If absent the value is derived from the parent dspace object + + + dspace + object + owner + Used to support researcher profiles + + diff --git a/dspace/config/spring/api/ads-integration.xml b/dspace/config/spring/api/ads-integration.xml new file mode 100644 index 0000000000..11bdc7cda2 --- /dev/null +++ b/dspace/config/spring/api/ads-integration.xml @@ -0,0 +1,116 @@ + + + + + + + Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + what metadatafield is generated. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace/config/spring/api/cache.xml b/dspace/config/spring/api/cache.xml new file mode 100644 index 0000000000..6e755ebd3f --- /dev/null +++ b/dspace/config/spring/api/cache.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dspace/config/spring/api/core-factory-services.xml b/dspace/config/spring/api/core-factory-services.xml index 2712ad21d0..6aadb591f2 100644 --- a/dspace/config/spring/api/core-factory-services.xml +++ b/dspace/config/spring/api/core-factory-services.xml @@ -27,6 +27,7 @@ + diff --git a/dspace/config/spring/api/core-services.xml b/dspace/config/spring/api/core-services.xml index 591a4ef3f4..8af2e42236 100644 --- a/dspace/config/spring/api/core-services.xml +++ b/dspace/config/spring/api/core-services.xml @@ -63,6 +63,8 @@ + + - + + diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml new file mode 100644 index 0000000000..e01b613833 --- /dev/null +++ b/dspace/config/spring/api/crossref-integration.xml @@ -0,0 +1,141 @@ + + + + + + + + Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + what metadatafield is generated. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index cc202dc0ac..25738ef1d3 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -107,6 +107,7 @@ + @@ -2005,6 +2006,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + search.entitytype:${researcher-profile.entity-type:Person} + + + + + + + diff --git a/dspace/config/spring/api/epo-integration.xml b/dspace/config/spring/api/epo-integration.xml new file mode 100644 index 0000000000..2509478706 --- /dev/null +++ b/dspace/config/spring/api/epo-integration.xml @@ -0,0 +1,136 @@ + + + + + + + Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + what metadatafield is generated. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dspace/config/spring/api/external-services.xml b/dspace/config/spring/api/external-services.xml index 9e28e5d559..ba210d12f1 100644 --- a/dspace/config/spring/api/external-services.xml +++ b/dspace/config/spring/api/external-services.xml @@ -5,6 +5,8 @@ + + @@ -92,5 +94,64 @@ - + + + + + + + Publication + none + + + + + + + + + + Publication + none + + + + + + + + + + + Publication + none + + + + + + + + + + + Publication + none + + + + + + + + + + + Publication + none + + + + + \ No newline at end of file diff --git a/dspace/config/spring/api/scielo-integration.xml b/dspace/config/spring/api/scielo-integration.xml new file mode 100644 index 0000000000..05ee62c9c1 --- /dev/null +++ b/dspace/config/spring/api/scielo-integration.xml @@ -0,0 +1,109 @@ + + + + + + + Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + what metadatafield is generated. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/dspace/config/spring/api/sherpa.xml b/dspace/config/spring/api/sherpa.xml index fb9e31b9a0..0414f3f8e4 100644 --- a/dspace/config/spring/api/sherpa.xml +++ b/dspace/config/spring/api/sherpa.xml @@ -33,4 +33,16 @@ + + + + + + + + + + + diff --git a/dspace/config/spring/api/solr-services.xml b/dspace/config/spring/api/solr-services.xml index 698a824184..80e9449d4c 100644 --- a/dspace/config/spring/api/solr-services.xml +++ b/dspace/config/spring/api/solr-services.xml @@ -31,5 +31,7 @@ + + diff --git a/dspace/config/spring/api/vufind-integration.xml b/dspace/config/spring/api/vufind-integration.xml new file mode 100644 index 0000000000..bc6c5def84 --- /dev/null +++ b/dspace/config/spring/api/vufind-integration.xml @@ -0,0 +1,165 @@ + + + + + + + Defines which metadatum is mapped on which metadatum. Note that while the key must be unique it + only matters here for postprocessing of the value. The mapped MetadatumContributor has full control over + what metadatafield is generated. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dspace/config/spring/rest/post-logged-in-actions.xml b/dspace/config/spring/rest/post-logged-in-actions.xml new file mode 100644 index 0000000000..1e02569647 --- /dev/null +++ b/dspace/config/spring/rest/post-logged-in-actions.xml @@ -0,0 +1,12 @@ + + + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index e419bd8579..c3c0246974 100644 --- a/pom.xml +++ b/pom.xml @@ -19,9 +19,9 @@ 11 - 5.3.18 - 2.6.6 - 5.6.2 + 5.3.20 + 2.6.8 + 5.6.5 5.6.5.Final 6.0.23.Final 42.3.3