Merge branch 'CST-5306' into CST-5669

This commit is contained in:
Luca Giamminonni
2022-05-11 11:00:32 +02:00
83 changed files with 2025 additions and 706 deletions

View File

@@ -530,14 +530,6 @@
<groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
</dependency>
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</dependency>
<dependency>
<groupId>com.ibm.icu</groupId>
<artifactId>icu4j</artifactId>
@@ -687,13 +679,6 @@
<version>1.1.1</version>
</dependency>
<!-- Gson: Java to Json conversion -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Plugin interface for the access status calculation.
*/
public interface AccessStatusHelper {
/**
* Calculate the access status for the item.
*
* @param context the DSpace context
* @param item the item
* @param threshold the embargo threshold date
* @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getAccessStatusFromItem(Context context, Item item, Date threshold)
throws SQLException;
}
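Custom helpers can be plugged in through this interface (the package documentation further down notes that you can supply your own class for more complex statuses). As a minimal sketch, assuming a hypothetical class name, an implementation that reports every item as open access would look like this:

package org.dspace.access.status;

import java.sql.SQLException;
import java.util.Date;

import org.dspace.content.Item;
import org.dspace.core.Context;

/**
 * Hypothetical plugin that reports every item as open access.
 * Only illustrates the AccessStatusHelper contract.
 */
public class AlwaysOpenAccessStatusHelper implements AccessStatusHelper {
    @Override
    public String getAccessStatusFromItem(Context context, Item item, Date threshold)
        throws SQLException {
        return DefaultAccessStatusHelper.OPEN_ACCESS;
    }
}

Such a class would then be registered through the plugin.single.org.dspace.access.status.AccessStatusHelper property shown in the AccessStatusService interface below.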

View File

@@ -0,0 +1,66 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.service.PluginService;
import org.dspace.services.ConfigurationService;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation for the access status calculation service.
*/
public class AccessStatusServiceImpl implements AccessStatusService {
// Plugin implementation, set from the DSpace configuration by init().
protected AccessStatusHelper helper = null;
protected Date forever_date = null;
@Autowired(required = true)
protected ConfigurationService configurationService;
@Autowired(required = true)
protected PluginService pluginService;
/**
* Initialize the bean (after dependency injection has already taken place).
* Ensures the configurationService is injected, so that we can get the plugin
* and the forever embargo date threshold from the configuration.
* Called by "init-method" in Spring configuration.
*
* @throws Exception on generic exception
*/
public void init() throws Exception {
if (helper == null) {
helper = (AccessStatusHelper) pluginService.getSinglePlugin(AccessStatusHelper.class);
if (helper == null) {
throw new IllegalStateException("The AccessStatusHelper plugin was not defined in "
+ "DSpace configuration.");
}
// Define the embargo forever date threshold for the access status.
// TODO: look at EmbargoService.FOREVER for possible improvements.
int year = configurationService.getIntProperty("access.status.embargo.forever.year");
int month = configurationService.getIntProperty("access.status.embargo.forever.month");
int day = configurationService.getIntProperty("access.status.embargo.forever.day");
forever_date = new LocalDate(year, month, day).toDate();
}
}
@Override
public String getAccessStatus(Context context, Item item) throws SQLException {
return helper.getAccessStatusFromItem(context, item, forever_date);
}
}

View File

@@ -0,0 +1,159 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
/**
* Default plugin implementation of the access status helper.
* The getAccessStatusFromItem method provides simple logic to
* calculate the access status of an item based on the policies of
* the primary or the first bitstream in the original bundle.
* Users can override this method for enhanced functionality.
*/
public class DefaultAccessStatusHelper implements AccessStatusHelper {
public static final String EMBARGO = "embargo";
public static final String METADATA_ONLY = "metadata.only";
public static final String OPEN_ACCESS = "open.access";
public static final String RESTRICTED = "restricted";
public static final String UNKNOWN = "unknown";
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected ResourcePolicyService resourcePolicyService =
AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected AuthorizeService authorizeService =
AuthorizeServiceFactory.getInstance().getAuthorizeService();
public DefaultAccessStatusHelper() {
super();
}
/**
* Look at the item's policies to determine an access status value.
* It also considers a date threshold for embargoes and restrictions.
*
* If the item is null, simply returns the "unknown" value.
*
* @param context the DSpace context
* @param item the item
* @param threshold the embargo threshold date
* @return an access status value
* @throws SQLException if a database error occurs
*/
@Override
public String getAccessStatusFromItem(Context context, Item item, Date threshold)
throws SQLException {
if (item == null) {
return UNKNOWN;
}
// Consider only the original bundles.
List<Bundle> bundles = item.getBundles(Constants.DEFAULT_BUNDLE_NAME);
// Check for primary bitstreams first.
Bitstream bitstream = bundles.stream()
.map(bundle -> bundle.getPrimaryBitstream())
.filter(Objects::nonNull)
.findFirst()
.orElse(null);
if (bitstream == null) {
// If there is no primary bitstream,
// take the first bitstream in the bundles.
bitstream = bundles.stream()
.map(bundle -> bundle.getBitstreams())
.flatMap(List::stream)
.findFirst()
.orElse(null);
}
return calculateAccessStatusForDso(context, bitstream, threshold);
}
/**
* Look at the DSpace object's policies to determine an access status value.
*
* If the object is null, returns the "metadata.only" value.
* If any policy attached to the object is valid for the anonymous group,
* returns the "open.access" value.
* Otherwise, if the policy start date is before the embargo threshold date,
* returns the "embargo" value.
* Every other case returns the "restricted" value.
*
* @param context the DSpace context
* @param dso the DSpace object
* @param threshold the embargo threshold date
* @return an access status value
*/
private String calculateAccessStatusForDso(Context context, DSpaceObject dso, Date threshold)
throws SQLException {
if (dso == null) {
return METADATA_ONLY;
}
// Only consider read policies.
List<ResourcePolicy> policies = authorizeService
.getPoliciesActionFilter(context, dso, Constants.READ);
int openAccessCount = 0;
int embargoCount = 0;
int restrictedCount = 0;
int unknownCount = 0;
// Look at all read policies.
for (ResourcePolicy policy : policies) {
boolean isValid = resourcePolicyService.isDateValid(policy);
Group group = policy.getGroup();
// The group must not be null here. However,
// if it is, consider this as an unexpected case.
if (group == null) {
unknownCount++;
} else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
// Only calculate the status for the anonymous group.
if (isValid) {
// If the policy is valid, the anonymous group has access
// to the bitstream.
openAccessCount++;
} else {
Date startDate = policy.getStartDate();
if (startDate != null && !startDate.before(threshold)) {
// If the policy start date has a value and this value
// is on or after the configured forever date, the
// access status is restricted.
restrictedCount++;
} else {
// If the current date is not between the policy start date
// and end date, the access status is embargo.
embargoCount++;
}
}
}
}
if (openAccessCount > 0) {
return OPEN_ACCESS;
}
if (embargoCount > 0 && restrictedCount == 0) {
return EMBARGO;
}
if (unknownCount > 0) {
return UNKNOWN;
}
return RESTRICTED;
}
}
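As a minimal usage sketch (assuming an existing Context and Item, as in the unit tests added at the end of this commit), the helper can be called directly with the configured forever threshold:

// The 10000-01-01 threshold matches the default forever date used in the tests below.
DefaultAccessStatusHelper helper = new DefaultAccessStatusHelper();
Date threshold = new org.joda.time.LocalDate(10000, 1, 1).toDate();
String status = helper.getAccessStatusFromItem(context, item, threshold);
// status is one of open.access, embargo, metadata.only, restricted or unknown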

View File

@@ -0,0 +1,25 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the access status package,
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public abstract class AccessStatusServiceFactory {
public abstract AccessStatusService getAccessStatusService();
public static AccessStatusServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("accessStatusServiceFactory", AccessStatusServiceFactory.class);
}
}
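In practice the service is looked up through this factory, as the new AccessStatusServiceTest below does. A minimal sketch, assuming an existing Context and Item:

AccessStatusService accessStatusService =
        AccessStatusServiceFactory.getInstance().getAccessStatusService();
String status = accessStatusService.getAccessStatus(context, item);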

View File

@@ -0,0 +1,26 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.factory;
import org.dspace.access.status.service.AccessStatusService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the access status package,
* use AccessStatusServiceFactory.getInstance() to retrieve an implementation.
*/
public class AccessStatusServiceFactoryImpl extends AccessStatusServiceFactory {
@Autowired(required = true)
private AccessStatusService accessStatusService;
@Override
public AccessStatusService getAccessStatusService() {
return accessStatusService;
}
}

View File

@@ -0,0 +1,30 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
/**
* <p>
* Access status allows users to view the availability of an item's bitstreams before
* browsing into the item itself.
* </p>
* <p>
* The access status is calculated through a pluggable class:
* {@link org.dspace.access.status.AccessStatusHelper}.
* The {@link org.dspace.access.status.AccessStatusServiceImpl}
* must be configured to specify this class, as well as a forever embargo date
* threshold year, month and day.
* </p>
* <p>
* See {@link org.dspace.access.status.DefaultAccessStatusHelper} for a simple calculation
* based on the primary or the first bitstream of the original bundle. You can
* supply your own class to implement more complex access statuses.
* </p>
* <p>
* For now, the access status is calculated when the item is shown in a list.
* </p>
*/
package org.dspace.access.status;

View File

@@ -0,0 +1,46 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status.service;
import java.sql.SQLException;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Public interface to the access status subsystem.
* <p>
* Configuration properties: (with examples)
* {@code
* # values for the forever embargo date threshold
* # This threshold date is used in the default access status helper to determine if an item is
* # restricted or embargoed based on the start date of the primary (or first) file policies.
* # In this case, if the policy start date is before the threshold date, the status will
* # be embargo; otherwise it will be restricted.
* # You might want to change this threshold based on your needs. For example, some databases
* # do not accept a date later than 31 December 9999.
* access.status.embargo.forever.year = 10000
* access.status.embargo.forever.month = 1
* access.status.embargo.forever.day = 1
* # implementation of access status helper plugin - replace with local implementation if applicable
* # This default access status helper provides an item status based on the policies of the primary
* # bitstream (or first bitstream in the original bundles if no primary file is specified).
* plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
* }
*/
public interface AccessStatusService {
/**
* Calculate the access status for an Item while considering the forever embargo date threshold.
*
* @param context the DSpace context
* @param item the item
* @return an access status value
* @throws SQLException An exception that provides information on a database access error or other errors.
*/
public String getAccessStatus(Context context, Item item) throws SQLException;
}

View File

@@ -11,13 +11,16 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -90,7 +93,7 @@ public class MetadataImporter {
public static void main(String[] args)
throws ParseException, SQLException, IOException, TransformerException,
ParserConfigurationException, AuthorizeException, SAXException,
NonUniqueMetadataException, RegistryImportException {
NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
// create an options object and populate it
CommandLineParser parser = new DefaultParser();
@@ -124,8 +127,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
public static void loadRegistry(String file, boolean forceUpdate)
throws SQLException, IOException, TransformerException, ParserConfigurationException,
AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, TransformerException, ParserConfigurationException, AuthorizeException,
SAXException, NonUniqueMetadataException, RegistryImportException, XPathExpressionException {
Context context = null;
try {
@@ -137,7 +140,9 @@ public class MetadataImporter {
Document document = RegistryImporter.loadXML(file);
// Get the nodes corresponding to types
NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList schemaNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-schema")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < schemaNodes.getLength(); i++) {
@@ -146,7 +151,8 @@ public class MetadataImporter {
}
// Get the nodes corresponding to types
NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");
NodeList typeNodes = (NodeList) xPath.compile("/dspace-dc-types/dc-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -178,8 +184,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadSchema(Context context, Node node, boolean updateExisting)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String name = RegistryImporter.getElementData(node, "name");
String namespace = RegistryImporter.getElementData(node, "namespace");
@@ -236,8 +242,8 @@ public class MetadataImporter {
* @throws RegistryImportException if import fails
*/
private static void loadType(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, AuthorizeException, NonUniqueMetadataException, RegistryImportException,
XPathExpressionException {
// Get the values
String schema = RegistryImporter.getElementData(node, "schema");
String element = RegistryImporter.getElementData(node, "element");
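The same Xalan-to-JAXP migration is repeated across the following files: every XPathAPI.selectNodeList / selectSingleNode call is replaced with a compiled javax.xml.xpath expression, and TransformerException gives way to XPathExpressionException in the signatures. A self-contained sketch of the pattern (the helper class name is illustrative only):

import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

public final class XPathMigrationSketch {
    private XPathMigrationSketch() { }

    // Replacement for the removed XPathAPI.selectNodeList(contextNode, expression)
    static NodeList selectNodeList(Node contextNode, String expression) throws XPathExpressionException {
        XPath xPath = XPathFactory.newInstance().newXPath();
        return (NodeList) xPath.compile(expression).evaluate(contextNode, XPathConstants.NODESET);
    }

    // Replacement for the removed XPathAPI.selectSingleNode(contextNode, expression)
    static Node selectSingleNode(Node contextNode, String expression) throws XPathExpressionException {
        XPath xPath = XPathFactory.newInstance().newXPath();
        return (Node) xPath.compile(expression).evaluate(contextNode, XPathConstants.NODE);
    }
}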

View File

@@ -13,8 +13,11 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
@@ -72,9 +75,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -115,9 +119,10 @@ public class RegistryImporter {
* @throws TransformerException if error
*/
public static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -16,9 +16,12 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
@@ -122,12 +125,13 @@ public class RegistryLoader {
*/
public static void loadBitstreamFormats(Context context, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
Document document = loadXML(filename);
// Get the nodes corresponding to formats
NodeList typeNodes = XPathAPI.selectNodeList(document,
"dspace-bitstream-types/bitstream-type");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList typeNodes = (NodeList) xPath.compile("dspace-bitstream-types/bitstream-type")
.evaluate(document, XPathConstants.NODESET);
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
@@ -151,8 +155,7 @@ public class RegistryLoader {
* @throws AuthorizeException if authorization error
*/
private static void loadFormat(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException {
throws SQLException, AuthorizeException, XPathExpressionException {
// Get the values
String mimeType = getElementData(node, "mimetype");
String shortDesc = getElementData(node, "short_description");
@@ -231,9 +234,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws XPathExpressionException {
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
Node childNode = (Node) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODE);
if (childNode == null) {
// No child node, so no values
@@ -274,9 +278,10 @@ public class RegistryLoader {
* @throws TransformerException if transformer error
*/
private static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws XPathExpressionException {
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList childNodes = (NodeList) xPath.compile(childName).evaluate(parentElement, XPathConstants.NODESET);
String[] data = new String[childNodes.getLength()];

View File

@@ -30,6 +30,10 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -38,7 +42,6 @@ import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -138,7 +141,7 @@ public class StructBuilder {
*/
public static void main(String[] argv)
throws ParserConfigurationException, SQLException,
FileNotFoundException, IOException, TransformerException {
IOException, TransformerException, XPathExpressionException {
// Define command line options.
Options options = new Options();
@@ -240,7 +243,7 @@ public class StructBuilder {
* @throws SQLException
*/
static void importStructure(Context context, InputStream input, OutputStream output)
throws IOException, ParserConfigurationException, SQLException, TransformerException {
throws IOException, ParserConfigurationException, SQLException, TransformerException, XPathExpressionException {
// load the XML
Document document = null;
@@ -258,13 +261,15 @@ public class StructBuilder {
// is properly structured.
try {
validate(document);
} catch (TransformerException ex) {
} catch (XPathExpressionException ex) {
System.err.format("The input document is invalid: %s%n", ex.getMessage());
System.exit(1);
}
// Check for 'identifier' attributes -- possibly output by this class.
NodeList identifierNodes = XPathAPI.selectNodeList(document, "//*[@identifier]");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList identifierNodes = (NodeList) xPath.compile("//*[@identifier]")
.evaluate(document, XPathConstants.NODESET);
if (identifierNodes.getLength() > 0) {
System.err.println("The input document has 'identifier' attributes, which will be ignored.");
}
@@ -287,7 +292,8 @@ public class StructBuilder {
Element[] elements = new Element[]{};
try {
// get the top level community list
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
// run the import starting with the top level communities
elements = handleCommunities(context, first, null);
@@ -456,14 +462,16 @@ public class StructBuilder {
* @throws TransformerException if transformer error
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
err.append("The following errors were encountered parsing the source XML.\n");
err.append("No changes have been made to the DSpace instance.\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList first = (NodeList) xPath.compile("/import_structure/community")
.evaluate(document, XPathConstants.NODESET);
if (first.getLength() == 0) {
err.append("-There are no top level communities in the source document.");
System.out.println(err.toString());
@@ -493,14 +501,15 @@ public class StructBuilder {
* no errors.
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Node n = communities.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -510,7 +519,7 @@ public class StructBuilder {
}
// validate sub communities
NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(n, XPathConstants.NODESET);
String comErrs = validateCommunities(subCommunities, level + 1);
if (comErrs != null) {
err.append(comErrs);
@@ -518,7 +527,7 @@ public class StructBuilder {
}
// validate collections
NodeList collections = XPathAPI.selectNodeList(n, "collection");
NodeList collections = (NodeList) xPath.compile("collection").evaluate(n, XPathConstants.NODESET);
String colErrs = validateCollections(collections, level + 1);
if (colErrs != null) {
err.append(colErrs);
@@ -542,14 +551,15 @@ public class StructBuilder {
* @return the errors to be generated by the calling method, or null if none
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
throws XPathExpressionException {
StringBuilder err = new StringBuilder();
boolean trip = false;
String errs = null;
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Node n = collections.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
NodeList name = (NodeList) xPath.compile("name").evaluate(n, XPathConstants.NODESET);
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level ").append(level)
@@ -613,8 +623,9 @@ public class StructBuilder {
* created communities (e.g. the handles they have been assigned)
*/
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
throws TransformerException, SQLException, AuthorizeException {
throws TransformerException, SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[communities.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < communities.getLength(); i++) {
Community community;
@@ -634,7 +645,7 @@ public class StructBuilder {
// now update the metadata
Node tn = communities.item(i);
for (Map.Entry<String, MetadataFieldName> entry : communityMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
communityService.setMetadataSingleValue(context, community,
entry.getValue(), null, getStringValue(nl.item(0)));
@@ -700,11 +711,11 @@ public class StructBuilder {
}
// handle sub communities
NodeList subCommunities = XPathAPI.selectNodeList(tn, "community");
NodeList subCommunities = (NodeList) xPath.compile("community").evaluate(tn, XPathConstants.NODESET);
Element[] subCommunityElements = handleCommunities(context, subCommunities, community);
// handle collections
NodeList collections = XPathAPI.selectNodeList(tn, "collection");
NodeList collections = (NodeList) xPath.compile("collection").evaluate(tn, XPathConstants.NODESET);
Element[] collectionElements = handleCollections(context, collections, community);
int j;
@@ -731,8 +742,9 @@ public class StructBuilder {
* created collections (e.g. the handle)
*/
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
throws TransformerException, SQLException, AuthorizeException {
throws SQLException, AuthorizeException, XPathExpressionException {
Element[] elements = new Element[collections.getLength()];
XPath xPath = XPathFactory.newInstance().newXPath();
for (int i = 0; i < collections.getLength(); i++) {
Element element = new Element("collection");
@@ -745,7 +757,7 @@ public class StructBuilder {
// import the rest of the metadata
Node tn = collections.item(i);
for (Map.Entry<String, MetadataFieldName> entry : collectionMap.entrySet()) {
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
NodeList nl = (NodeList) xPath.compile(entry.getKey()).evaluate(tn, XPathConstants.NODESET);
if (nl.getLength() == 1) {
collectionService.setMetadataSingleValue(context, collection,
entry.getValue(), null, getStringValue(nl.item(0)));

View File

@@ -23,7 +23,6 @@ public class ResourceAlreadyExistsException extends RuntimeException {
* existing resource.
*
* @param message the error message
* @param resource the resource that caused the conflict
*/
public ResourceAlreadyExistsException(String message) {
super(message);

View File

@@ -51,6 +51,10 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.collections4.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
@@ -59,7 +63,6 @@ import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.app.util.LocalSchemaFilenameFilter;
import org.dspace.app.util.RelationshipUtils;
@@ -863,7 +866,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// Load all metadata schemas into the item.
protected void loadMetadata(Context c, Item myitem, String path)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
// Load the dublin core metadata
loadDublinCore(c, myitem, path + "dublin_core.xml");
@@ -877,14 +880,15 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
protected void loadDublinCore(Context c, Item myitem, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
SAXException, TransformerException, AuthorizeException, XPathExpressionException {
Document document = loadXML(filename);
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import
// file
String schema;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem(
"schema");
if (schemaAttr == null) {
@@ -894,8 +898,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document,
"/dublin_core/dcvalue");
NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
if (!isQuiet) {
System.out.println("\tLoading dublin core from " + filename);

View File

@@ -27,10 +27,12 @@ import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -170,24 +172,21 @@ public class MetadataUtilities {
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @return list of DtoMetadata representing the metadata fields relating to an Item
* @throws SQLException if database error
* @throws IOException if IO error
* @throws ParserConfigurationException if parser config error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
*/
public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
throws IOException, XPathExpressionException, SAXException {
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
String schema;
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList metadata = (NodeList) xPath.compile("/dublin_core").evaluate(document, XPathConstants.NODESET);
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchemaEnum.DC.getName();
@@ -196,7 +195,7 @@ public class MetadataUtilities {
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
NodeList dcNodes = (NodeList) xPath.compile("/dublin_core/dcvalue").evaluate(document, XPathConstants.NODESET);
for (int i = 0; i < dcNodes.getLength(); i++) {
Node n = dcNodes.item(i);

View File

@@ -272,11 +272,6 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
return empty();
}
if (indexableObjects.size() > 1) {
log.warn("Multiple " + profileType + " type collections were found during profile creation");
return empty();
}
return ofNullable((Collection) indexableObjects.get(0).getIndexedObject());
}
@@ -296,7 +291,7 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
item = installItemService.installItem(context, workspaceItem);
if (isNewProfilePrivateByDefault()) {
if (isNewProfileNotVisibleByDefault()) {
Group anonymous = groupService.findByName(context, ANONYMOUS);
authorizeService.removeGroupPolicies(context, item, anonymous);
}
@@ -310,8 +305,8 @@ public class ResearcherProfileServiceImpl implements ResearcherProfileService {
return configurationService.getBooleanProperty("researcher-profile.hard-delete.enabled");
}
private boolean isNewProfilePrivateByDefault() {
return configurationService.getBooleanProperty("researcher-profile.set-new-profile-private");
private boolean isNewProfileNotVisibleByDefault() {
return !configurationService.getBooleanProperty("researcher-profile.set-new-profile-visible");
}
private void removeOwnerMetadata(Context context, Item profileItem) throws SQLException {

View File

@@ -67,7 +67,7 @@ public interface ResearcherProfileService {
/**
* Changes the visibility of the given profile using the given new visible
* value.
* value. The visibility controls whether the Profile is Anonymous READ or not.
*
* @param context the relevant DSpace Context.
* @param profile the researcher profile to update

View File

@@ -561,6 +561,15 @@ public class DCInput {
return true;
}
/**
* Get the type bind list for use in determining whether
* to display this field in Angular dynamic form building.
*
* @return list of bound types
*/
public List<String> getTypeBindList() {
return typeBind;
}
/**
* Verify whether the current field contains an entity relationship
* This also implies a relationship type is defined for this field

View File

@@ -14,11 +14,12 @@ import java.util.Iterator;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.logging.log4j.Logger;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
@@ -62,36 +63,26 @@ public class XMLUtils {
/**
* @param xml The starting context (a Node or a Document, for example).
* @param NodeListXPath xpath
* @param nodeListXPath xpath
* @return A Node matches the NodeListXPath
* null if nothing matches the NodeListXPath
* @throws XPathExpressionException if xpath error
*/
public static Node getNode(Node xml, String NodeListXPath) throws XPathExpressionException {
Node result = null;
try {
result = XPathAPI.selectSingleNode(xml, NodeListXPath);
} catch (TransformerException e) {
log.error("Error", e);
}
return result;
public static Node getNode(Node xml, String nodeListXPath) throws XPathExpressionException {
XPath xPath = XPathFactory.newInstance().newXPath();
return (Node) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODE);
}
/**
* @param xml The starting context (a Node or a Document, for example).
* @param NodeListXPath xpath
* @param nodeListXPath xpath
* @return A NodeList containing the nodes that match the NodeListXPath
* null if nothing matches the NodeListXPath
* @throws XPathExpressionException if xpath error
*/
public static NodeList getNodeList(Node xml, String NodeListXPath) throws XPathExpressionException {
NodeList nodeList = null;
try {
nodeList = XPathAPI.selectNodeList(xml, NodeListXPath);
} catch (TransformerException e) {
log.error("Error", e);
}
return nodeList;
public static NodeList getNodeList(Node xml, String nodeListXPath) throws XPathExpressionException {
XPath xPath = XPathFactory.newInstance().newXPath();
return (NodeList) xPath.compile(nodeListXPath).evaluate(xml, XPathConstants.NODESET);
}
public static Iterator<Node> getNodeListIterator(Node xml, String NodeListXPath) throws XPathExpressionException {

View File

@@ -130,12 +130,6 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin {
return aliasList.toArray(new String[aliasList.size()]);
}
/**
* We need to force this, because some dependency elsewhere interferes.
*/
private static final String TRANSFORMER_FACTORY_CLASS
= "com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl";
private Transformer transformer = null;
private File transformFile = null;
private long transformLastModified = 0;
@@ -181,8 +175,7 @@ public abstract class XSLTCrosswalk extends SelfNamedPlugin {
Source transformSource
= new StreamSource(new FileInputStream(transformFile));
TransformerFactory transformerFactory
= TransformerFactory.newInstance(
TRANSFORMER_FACTORY_CLASS, null);
= TransformerFactory.newInstance();
transformer = transformerFactory.newTransformer(transformSource);
transformLastModified = transformFile.lastModified();
} catch (TransformerConfigurationException | FileNotFoundException e) {

View File

@@ -1174,7 +1174,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
//DO NOT ESCAPE RANGE QUERIES !
if (!value.matches("\\[.*TO.*\\]")) {
value = ClientUtils.escapeQueryChars(value);
filterQuery.append("(").append(value).append(")");
filterQuery.append("\"").append(value).append("\"");
} else {
filterQuery.append(value);
}

View File

@@ -23,7 +23,7 @@ public class CanvasCacheEvictService {
CacheManager cacheManager;
public void evictSingleCacheValue(String cacheKey) {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey);
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey);
}
}

View File

@@ -26,11 +26,11 @@ public class ManifestsCacheEvictService {
CacheManager cacheManager;
public void evictSingleCacheValue(String cacheKey) {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evict(cacheKey);
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).evictIfPresent(cacheKey);
}
public void evictAllCacheValues() {
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).clear();
Objects.requireNonNull(cacheManager.getCache(CACHE_NAME)).invalidate();
}
}

View File

@@ -1,70 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics;
import java.util.ArrayList;
import java.util.List;
import com.google.gson.Gson;
/**
* A neutral data object to hold data for statistics.
*/
public class DataTermsFacet {
private List<TermsFacet> terms;
public DataTermsFacet() {
terms = new ArrayList<TermsFacet>();
}
public void addTermFacet(TermsFacet termsFacet) {
terms.add(termsFacet);
}
/**
* Render this data object into JSON format.
*
* An example of the output could be of the format:
* [{"term":"247166","count":10},{"term":"247168","count":6}]
*
* @return JSON-formatted data.
*/
public String toJson() {
Gson gson = new Gson();
return gson.toJson(terms);
}
public static class TermsFacet {
private String term;
private Integer count;
public TermsFacet(String term, Integer count) {
setTerm(term);
setCount(count);
}
public String getTerm() {
return term;
}
public void setTerm(String term) {
this.term = term;
}
public Integer getCount() {
return count;
}
public void setCount(Integer count) {
this.count = count;
}
}
}

View File

@@ -12,6 +12,7 @@ import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPathExpressionException;
import org.dspace.administer.MetadataImporter;
import org.dspace.administer.RegistryImportException;
@@ -89,7 +90,7 @@ public class RegistryUpdater implements Callback {
} catch (IOException | SQLException | ParserConfigurationException
| TransformerException | RegistryImportException
| AuthorizeException | NonUniqueMetadataException
| SAXException e) {
| SAXException | XPathExpressionException e) {
log.error("Error attempting to update Bitstream Format and/or Metadata Registries", e);
throw new RuntimeException("Error attempting to update Bitstream Format and/or Metadata Registries", e);
} finally {

View File

@@ -64,12 +64,6 @@ public class SubmissionFormsMigration extends DSpaceRunnable<SubmissionFormsMigr
"<!ELEMENT input-forms (form-map, form-definitions, form-value-pairs) >";
private List<File> tempFiles = new ArrayList<>();
/**
* We need to force this, because some dependency elsewhere interferes.
*/
private static final String TRANSFORMER_FACTORY_CLASS
= "org.apache.xalan.processor.TransformerFactoryImpl";
@Override
public void internalRun() throws TransformerException {
if (help) {
@@ -101,8 +95,7 @@ public class SubmissionFormsMigration extends DSpaceRunnable<SubmissionFormsMigr
Result result = new StreamResult(new File(outputPath));
// Create an instance of TransformerFactory
TransformerFactory transformerFactory = TransformerFactory.newInstance(
TRANSFORMER_FACTORY_CLASS, null);
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Transformer trans;
try {

View File

@@ -15,8 +15,11 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.xpath.XPathAPI;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.w3c.dom.Document;
@@ -56,7 +59,7 @@ public class ControlledVocabulary {
* TODO: add some caching !
*/
public static ControlledVocabulary loadVocabulary(String fileName)
throws IOException, SAXException, ParserConfigurationException, TransformerException {
throws IOException, SAXException, ParserConfigurationException, XPathExpressionException {
StringBuilder filePath = new StringBuilder();
ConfigurationService configurationService
= DSpaceServicesFactory.getInstance().getConfigurationService();
@@ -70,7 +73,9 @@ public class ControlledVocabulary {
if (controlledVocFile.exists()) {
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document document = builder.parse(controlledVocFile);
return loadVocabularyNode(XPathAPI.selectSingleNode(document, "node"), "");
XPath xPath = XPathFactory.newInstance().newXPath();
Node node = (Node) xPath.compile("node").evaluate(document, XPathConstants.NODE);
return loadVocabularyNode(node, "");
} else {
return null;
}
@@ -85,7 +90,8 @@ public class ControlledVocabulary {
* @return a vocabulary node with all its children
* @throws XPathExpressionException should something go wrong with loading the XML
*/
private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue) throws TransformerException {
private static ControlledVocabulary loadVocabularyNode(Node node, String initialValue)
throws XPathExpressionException {
Node idNode = node.getAttributes().getNamedItem("id");
String id = null;
if (idNode != null) {
@@ -102,7 +108,9 @@ public class ControlledVocabulary {
} else {
value = label;
}
NodeList subNodes = XPathAPI.selectNodeList(node, "isComposedBy/node");
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList subNodes = (NodeList) xPath.compile("isComposedBy/node").evaluate(node,
XPathConstants.NODESET);
List<ControlledVocabulary> subVocabularies = new ArrayList<>(subNodes.getLength());
for (int i = 0; i < subNodes.getLength(); i++) {

View File

@@ -144,3 +144,5 @@ authentication-ip.Student = 6.6.6.6
useProxies = true
proxies.trusted.ipranges = 7.7.7.7
proxies.trusted.include_ui_ip = true
researcher-profile.entity-type = Person

View File

@@ -140,6 +140,7 @@
<dc-qualifier>ispartofseries</dc-qualifier>
<repeatable>true</repeatable>
<label>Series/Report No.</label>
<type-bind>Technical Report</type-bind>
<input-type>series</input-type>
<hint>Enter the series and number assigned to this item by your community.</hint>
<required></required>
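This <type-bind> value works together with the new DCInput#getTypeBindList() getter added earlier in this commit: a bound field is only rendered for item types listed in the bind. A minimal sketch of that check (not DSpace code; the method name is illustrative):

// A field with no type binding is shown for every document type; otherwise it is
// shown only when the item's type (e.g. "Technical Report") appears in the list.
static boolean isFieldDisplayedForType(java.util.List<String> typeBindList, String documentType) {
    return typeBindList == null || typeBindList.isEmpty() || typeBindList.contains(documentType);
}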

View File

@@ -0,0 +1,126 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
import java.sql.SQLException;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.access.status.factory.AccessStatusServiceFactory;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Unit Tests for access status service
*/
public class AccessStatusServiceTest extends AbstractUnitTest {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AccessStatusServiceTest.class);
private Collection collection;
private Community owningCommunity;
private Item item;
protected CommunityService communityService =
ContentServiceFactory.getInstance().getCommunityService();
protected CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected WorkspaceItemService workspaceItemService =
ContentServiceFactory.getInstance().getWorkspaceItemService();
protected InstallItemService installItemService =
ContentServiceFactory.getInstance().getInstallItemService();
protected AccessStatusService accessStatusService =
AccessStatusServiceFactory.getInstance().getAccessStatusService();
/**
* This method will be run before every test as per @Before. It will
* initialize resources required for the tests.
*
* Other methods can be annotated with @Before here or in subclasses
* but no execution order is guaranteed
*/
@Before
@Override
public void init() {
super.init();
try {
context.turnOffAuthorisationSystem();
owningCommunity = communityService.create(null, context);
collection = collectionService.create(context, owningCommunity);
item = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
context.restoreAuthSystemState();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
} catch (SQLException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
}
}
/**
* This method will be run after every test as per @After. It will
* clean resources initialized by the @Before methods.
*
* Other methods can be annotated with @After here or in subclasses
* but no execution order is guaranteed
*/
@After
@Override
public void destroy() {
context.turnOffAuthorisationSystem();
try {
itemService.delete(context, item);
} catch (Exception e) {
// ignore
}
try {
collectionService.delete(context, collection);
} catch (Exception e) {
// ignore
}
try {
communityService.delete(context, owningCommunity);
} catch (Exception e) {
// ignore
}
context.restoreAuthSystemState();
item = null;
collection = null;
owningCommunity = null;
try {
super.destroy();
} catch (Exception e) {
// ignore
}
}
@Test
public void testGetAccessStatus() throws Exception {
String status = accessStatusService.getAccessStatus(context, item);
assertNotEquals("testGetAccessStatus 0", status, DefaultAccessStatusHelper.UNKNOWN);
}
}

View File

@@ -0,0 +1,423 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.access.status;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Constants;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.joda.time.LocalDate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
public class DefaultAccessStatusHelperTest extends AbstractUnitTest {
private static final Logger log = LogManager.getLogger(DefaultAccessStatusHelperTest.class);
private Collection collection;
private Community owningCommunity;
private Item itemWithoutBundle;
private Item itemWithoutBitstream;
private Item itemWithBitstream;
private Item itemWithEmbargo;
private Item itemWithDateRestriction;
private Item itemWithGroupRestriction;
private Item itemWithoutPolicy;
private Item itemWithoutPrimaryBitstream;
private Item itemWithPrimaryAndMultipleBitstreams;
private Item itemWithoutPrimaryAndMultipleBitstreams;
private DefaultAccessStatusHelper helper;
private Date threshold;
protected CommunityService communityService =
ContentServiceFactory.getInstance().getCommunityService();
protected CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
protected ItemService itemService =
ContentServiceFactory.getInstance().getItemService();
protected WorkspaceItemService workspaceItemService =
ContentServiceFactory.getInstance().getWorkspaceItemService();
protected InstallItemService installItemService =
ContentServiceFactory.getInstance().getInstallItemService();
protected BundleService bundleService =
ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService =
ContentServiceFactory.getInstance().getBitstreamService();
protected ResourcePolicyService resourcePolicyService =
AuthorizeServiceFactory.getInstance().getResourcePolicyService();
protected GroupService groupService =
EPersonServiceFactory.getInstance().getGroupService();
/**
* This method will be run before every test as per @Before. It will
* initialize resources required for the tests.
*
* Other methods can be annotated with @Before here or in subclasses
* but no execution order is guaranteed
*/
@Before
@Override
public void init() {
super.init();
try {
context.turnOffAuthorisationSystem();
owningCommunity = communityService.create(null, context);
collection = collectionService.create(context, owningCommunity);
itemWithoutBundle = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithEmbargo = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithDateRestriction = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithGroupRestriction = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPolicy = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPrimaryBitstream = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithPrimaryAndMultipleBitstreams = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
itemWithoutPrimaryAndMultipleBitstreams = installItemService.installItem(context,
workspaceItemService.create(context, collection, true));
context.restoreAuthSystemState();
} catch (AuthorizeException ex) {
log.error("Authorization Error in init", ex);
fail("Authorization Error in init: " + ex.getMessage());
} catch (SQLException ex) {
log.error("SQL Error in init", ex);
fail("SQL Error in init: " + ex.getMessage());
}
helper = new DefaultAccessStatusHelper();
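// Threshold date passed to the helper: the embargo test below uses a policy start date
// just before this date, while the date restriction test uses a start date equal to it.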
threshold = new LocalDate(10000, 1, 1).toDate();
}
/**
* This method will be run after every test as per @After. It will
* clean resources initialized by the @Before methods.
*
* Other methods can be annotated with @After here or in subclasses
* but no execution order is guaranteed
*/
@After
@Override
public void destroy() {
context.turnOffAuthorisationSystem();
try {
itemService.delete(context, itemWithoutBundle);
itemService.delete(context, itemWithoutBitstream);
itemService.delete(context, itemWithBitstream);
itemService.delete(context, itemWithEmbargo);
itemService.delete(context, itemWithDateRestriction);
itemService.delete(context, itemWithGroupRestriction);
itemService.delete(context, itemWithoutPolicy);
itemService.delete(context, itemWithoutPrimaryBitstream);
itemService.delete(context, itemWithPrimaryAndMultipleBitstreams);
itemService.delete(context, itemWithoutPrimaryAndMultipleBitstreams);
} catch (Exception e) {
// ignore
}
try {
collectionService.delete(context, collection);
} catch (Exception e) {
// ignore
}
try {
communityService.delete(context, owningCommunity);
} catch (Exception e) {
// ignore
}
context.restoreAuthSystemState();
itemWithoutBundle = null;
itemWithoutBitstream = null;
itemWithBitstream = null;
itemWithEmbargo = null;
itemWithDateRestriction = null;
itemWithGroupRestriction = null;
itemWithoutPolicy = null;
itemWithoutPrimaryBitstream = null;
itemWithPrimaryAndMultipleBitstreams = null;
itemWithoutPrimaryAndMultipleBitstreams = null;
collection = null;
owningCommunity = null;
helper = null;
threshold = null;
communityService = null;
collectionService = null;
itemService = null;
workspaceItemService = null;
installItemService = null;
bundleService = null;
bitstreamService = null;
resourcePolicyService = null;
groupService = null;
try {
super.destroy();
} catch (Exception e) {
// ignore
}
}
/**
* Test for a null item
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithNullItem() throws Exception {
String status = helper.getAccessStatusFromItem(context, null, threshold);
assertThat("testWithNullItem 0", status, equalTo(DefaultAccessStatusHelper.UNKNOWN));
}
/**
* Test for an item with no bundle
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutBundle() throws Exception {
String status = helper.getAccessStatusFromItem(context, itemWithoutBundle, threshold);
assertThat("testWithoutBundle 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY));
}
/**
* Test for an item with no bitstream
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutBitstream() throws Exception {
context.turnOffAuthorisationSystem();
bundleService.create(context, itemWithoutBitstream, Constants.CONTENT_BUNDLE_NAME);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutBitstream, threshold);
assertThat("testWithoutBitstream 0", status, equalTo(DefaultAccessStatusHelper.METADATA_ONLY));
}
/**
* Test for an item with a basic bitstream (open access)
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithBitstream() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithBitstream, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithBitstream, threshold);
assertThat("testWithBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
/**
* Test for an item with an embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithEmbargo() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithEmbargo, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
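// Start date just before the threshold (9999-12-31), so the expected status is EMBARGO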
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithEmbargo, threshold);
assertThat("testWithEmbargo 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
}
/**
* Test for an item with an anonymous date restriction
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithDateRestriction() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithDateRestriction, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Restriction");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
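// Start date equal to the threshold (10000-01-01), so the expected status is RESTRICTED rather than EMBARGO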
policy.setStartDate(new LocalDate(10000, 1, 1).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithDateRestriction, threshold);
assertThat("testWithDateRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with a group restriction
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithGroupRestriction() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithGroupRestriction, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Restriction");
Group group = groupService.findByName(context, Group.ADMIN);
policy.setGroup(group);
policy.setAction(Constants.READ);
policies.add(policy);
authorizeService.removeAllPolicies(context, bitstream);
authorizeService.addPolicies(context, policies, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithGroupRestriction, threshold);
assertThat("testWithGroupRestriction 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with no policy
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutPolicy() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPolicy, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "primary");
bundle.setPrimaryBitstreamID(bitstream);
authorizeService.removeAllPolicies(context, bitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPolicy, threshold);
assertThat("testWithoutPolicy 0", status, equalTo(DefaultAccessStatusHelper.RESTRICTED));
}
/**
* Test for an item with no primary bitstream
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithoutPrimaryBitstream() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPrimaryBitstream, Constants.CONTENT_BUNDLE_NAME);
Bitstream bitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bitstream.setName(context, "first");
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryBitstream, threshold);
assertThat("testWithoutPrimaryBitstream 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
/**
* Test for an item with an open access bitstream
* and another primary bitstream on embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithPrimaryAndMultipleBitstreams() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithPrimaryAndMultipleBitstreams,
Constants.CONTENT_BUNDLE_NAME);
bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
Bitstream primaryBitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
bundle.setPrimaryBitstreamID(primaryBitstream);
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, primaryBitstream);
authorizeService.addPolicies(context, policies, primaryBitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.EMBARGO));
}
/**
* Test for an item with an open access bitstream
* and another bitstream on embargo
* @throws java.lang.Exception passed through.
*/
@Test
public void testWithNoPrimaryAndMultipleBitstreams() throws Exception {
context.turnOffAuthorisationSystem();
Bundle bundle = bundleService.create(context, itemWithoutPrimaryAndMultipleBitstreams,
Constants.CONTENT_BUNDLE_NAME);
bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
Bitstream anotherBitstream = bitstreamService.create(context, bundle,
new ByteArrayInputStream("1".getBytes(StandardCharsets.UTF_8)));
List<ResourcePolicy> policies = new ArrayList<>();
ResourcePolicy policy = resourcePolicyService.create(context);
policy.setRpName("Embargo");
Group group = groupService.findByName(context, Group.ANONYMOUS);
policy.setGroup(group);
policy.setAction(Constants.READ);
policy.setStartDate(new LocalDate(9999, 12, 31).toDate());
policies.add(policy);
authorizeService.removeAllPolicies(context, anotherBitstream);
authorizeService.addPolicies(context, policies, anotherBitstream);
context.restoreAuthSystemState();
String status = helper.getAccessStatusFromItem(context, itemWithoutPrimaryAndMultipleBitstreams, threshold);
assertThat("testWithNoPrimaryAndMultipleBitstreams 0", status, equalTo(DefaultAccessStatusHelper.OPEN_ACCESS));
}
}

View File

@@ -11,7 +11,8 @@ import java.sql.SQLException;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.core.Context;
@@ -26,7 +27,7 @@ import org.dspace.versioning.service.VersioningService;
*/
public class VersionBuilder extends AbstractBuilder<Version, VersioningService> {
private static final Logger log = Logger.getLogger(VersionBuilder.class);
private static final Logger log = LogManager.getLogger(VersionBuilder.class);
private Version version;

View File

@@ -12,14 +12,11 @@ import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
@@ -35,7 +32,6 @@ import org.dspace.app.iiif.model.generator.ContentAsTextGenerator;
import org.dspace.app.iiif.model.generator.ManifestGenerator;
import org.dspace.app.iiif.model.generator.SearchResultGenerator;
import org.dspace.app.iiif.service.utils.IIIFUtils;
import org.dspace.discovery.SolrSearchCore;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -66,9 +62,6 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
@Autowired
SearchResultGenerator searchResult;
@Autowired
SolrSearchCore solrSearchCore;
@Autowired
ManifestGenerator manifestGenerator;
@@ -167,26 +160,49 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
private String getAnnotationList(UUID uuid, String json, String query) {
searchResult.setIdentifier(manifestId + "/search?q="
+ URLEncoder.encode(query, StandardCharsets.UTF_8));
GsonBuilder builder = new GsonBuilder();
Gson gson = builder.create();
JsonObject body = gson.fromJson(json, JsonObject.class);
ObjectMapper mapper = new ObjectMapper();
JsonNode body = null;
try {
body = mapper.readTree(json);
} catch (JsonProcessingException e) {
log.error("Unable to process json response.", e);
}
// If an error occurred or there is no body, return immediately
if (body == null) {
log.warn("Unable to process json response.");
return utils.asJson(searchResult.generateResource());
}
// outer ocr highlight element
JsonObject highs = body.getAsJsonObject("ocrHighlighting");
// highlight entries
for (Map.Entry<String, JsonElement> ocrIds: highs.entrySet()) {
// ocr_text
JsonObject ocrObj = ocrIds.getValue().getAsJsonObject().getAsJsonObject("ocr_text");
// snippets array
if (ocrObj != null) {
for (JsonElement snippetArray : ocrObj.getAsJsonObject().get("snippets").getAsJsonArray()) {
String pageId = getCanvasId(snippetArray.getAsJsonObject().get("pages"));
for (JsonElement highlights : snippetArray.getAsJsonObject().getAsJsonArray("highlights")) {
for (JsonElement highlight : highlights.getAsJsonArray()) {
searchResult.addResource(getAnnotation(highlight, pageId, uuid));
// Example structure of Solr response available at
// https://github.com/dbmdz/solr-ocrhighlighting/blob/main/docs/query.md
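// Assumed shape, inferred from the parsing below (the replaced Gson code read the same fields):
// "ocrHighlighting": { "<docId>": { "ocr_text": { "snippets": [ {
//     "pages": [ { "id": "..." } ],
//     "highlights": [ [ { "text": "...", "ulx": 0, "uly": 0, "lrx": 0, "lry": 0 } ] ] } ] } } }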
// Get the outer ocrHighlighting node
JsonNode highs = body.get("ocrHighlighting");
if (highs != null) {
// Loop through each highlight entry under ocrHighlighting
for (final JsonNode highEntry : highs) {
// Get the ocr_text node under the entry
JsonNode ocrNode = highEntry.get("ocr_text");
if (ocrNode != null) {
// Loop through the snippets array under that
for (final JsonNode snippet : ocrNode.get("snippets")) {
if (snippet != null) {
// Get a canvas ID based on snippet's pages
String pageId = getCanvasId(snippet.get("pages"));
if (pageId != null) {
// Loop through array of highlights for each snippet.
for (final JsonNode highlights : snippet.get("highlights")) {
if (highlights != null) {
// May be multiple word highlights on a page, so loop through them.
for (int i = 0; i < highlights.size(); i++) {
// Add annotation associated with each highlight
AnnotationGenerator anno = getAnnotation(highlights.get(i), pageId, uuid);
if (anno != null) {
searchResult.addResource(anno);
}
}
}
}
}
}
}
}
@@ -198,22 +214,25 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
/**
* Returns the annotation generator for the highlight.
* @param highlight highlight element from solor response
* @param highlight highlight node from Solr response
* @param pageId page id from solr response
* @return generator for a single annotation
*/
private AnnotationGenerator getAnnotation(JsonElement highlight, String pageId, UUID uuid) {
JsonObject hcoords = highlight.getAsJsonObject();
String text = (hcoords.get("text").getAsString());
int ulx = hcoords.get("ulx").getAsInt();
int uly = hcoords.get("uly").getAsInt();
int lrx = hcoords.get("lrx").getAsInt();
int lry = hcoords.get("lry").getAsInt();
String w = Integer.toString(lrx - ulx);
String h = Integer.toString(lry - uly);
private AnnotationGenerator getAnnotation(JsonNode highlight, String pageId, UUID uuid) {
String text = highlight.get("text") != null ? highlight.get("text").asText() : null;
int ulx = highlight.get("ulx") != null ? highlight.get("ulx").asInt() : -1;
int uly = highlight.get("uly") != null ? highlight.get("uly").asInt() : -1;
int lrx = highlight.get("lrx") != null ? highlight.get("lrx").asInt() : -1;
int lry = highlight.get("lry") != null ? highlight.get("lry").asInt() : -1;
String w = (lrx >= 0 && ulx >= 0) ? Integer.toString(lrx - ulx) : null;
String h = (lry >= 0 && uly >= 0) ? Integer.toString(lry - uly) : null;
if (text != null && w != null && h != null) {
String params = ulx + "," + uly + "," + w + "," + h;
return createSearchResultAnnotation(params, text, pageId, uuid);
}
return null;
}
/**
* Returns position of canvas. Uses the "pages" id attribute.
@@ -221,16 +240,23 @@ public class WordHighlightSolrSearch implements SearchAnnotationService {
* delimited with a "." and that the integer corresponds to the
* canvas identifier in the manifest. For METS/ALTO documents, the page
* order can be derived from the METS file when loading the solr index.
* @param element the pages element
* @return canvas id
* @param pagesNode the pages node
* @return canvas id or null if node was null
*/
private String getCanvasId(JsonElement element) {
JsonArray pages = element.getAsJsonArray();
JsonObject page = pages.get(0).getAsJsonObject();
String[] identArr = page.get("id").getAsString().split("\\.");
private String getCanvasId(JsonNode pagesNode) {
if (pagesNode != null) {
JsonNode page = pagesNode.get(0);
if (page != null) {
JsonNode pageId = page.get("id");
if (pageId != null) {
String[] identArr = pageId.asText().split("\\.");
// the canvas id.
return "c" + identArr[1];
}
}
}
return null;
}
/**
* Creates annotation with word highlight coordinates.

View File

@@ -82,10 +82,6 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
</exclusion>
<exclusion>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>

View File

@@ -22,6 +22,7 @@ import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
public class DSpaceResourceResolver implements ResourceResolver {
// Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions
private static final TransformerFactory transformerFactory = TransformerFactory
.newInstance("net.sf.saxon.TransformerFactoryImpl", null);

View File

@@ -19,6 +19,7 @@ import javax.xml.transform.stream.StreamSource;
import org.apache.commons.io.IOUtils;
public abstract class AbstractXSLTest {
// Requires usage of Saxon as OAI-PMH uses some XSLT 2 functions
private static final TransformerFactory factory = TransformerFactory
.newInstance("net.sf.saxon.TransformerFactoryImpl", null);

View File

@@ -8,17 +8,20 @@
package org.dspace.app.rest.authorization.impl;
import java.sql.SQLException;
import java.util.Objects;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.app.rest.authorization.AuthorizationFeature;
import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -32,33 +35,41 @@ import org.springframework.stereotype.Component;
*/
@Component
@AuthorizationFeatureDocumentation(name = CanClaimItemFeature.NAME,
description = "Used to verify if the given user can request the claim of an item")
description = "Used to verify if the current user is able to claim this item as their profile. "
+ "Only available if the current item is not already claimed.")
public class CanClaimItemFeature implements AuthorizationFeature {
public static final String NAME = "canClaimItem";
private static final Logger LOG = LoggerFactory.getLogger(CanClaimItemFeature.class);
@Autowired
private ItemService itemService;
@Autowired
private ShowClaimItemFeature showClaimItemFeature;
private ResearcherProfileService researcherProfileService;
@Override
@SuppressWarnings("rawtypes")
public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException {
if (!showClaimItemFeature.isAuthorized(context, object)) {
return false;
}
if (!(object instanceof ItemRest) || Objects.isNull(context.getCurrentUser())) {
if (!(object instanceof ItemRest) || context.getCurrentUser() == null) {
return false;
}
String id = ((ItemRest) object).getId();
Item item = itemService.find(context, UUID.fromString(id));
return hasNotOwner(item);
return researcherProfileService.hasProfileType(item) && hasNotOwner(item) && hasNotAlreadyAProfile(context);
}
private boolean hasNotAlreadyAProfile(Context context) {
try {
return researcherProfileService.findById(context, context.getCurrentUser().getID()) == null;
} catch (SQLException | AuthorizeException e) {
LOG.warn("Error while checking if eperson has a ResearcherProfileAssociated: {}", e.getMessage(), e);
return false;
}
}
private boolean hasNotOwner(Item item) {

View File

@@ -1,80 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.authorization.impl;
import java.sql.SQLException;
import java.util.Objects;
import java.util.UUID;
import org.dspace.app.profile.service.ResearcherProfileService;
import org.dspace.app.rest.authorization.AuthorizationFeature;
import org.dspace.app.rest.authorization.AuthorizationFeatureDocumentation;
import org.dspace.app.rest.model.BaseObjectRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Checks if the given user can request the claim of an item. Whether or not the
* user can then make the claim is determined by the feature
* {@link CanClaimItemFeature}.
*
* @author Corrado Lombardi (corrado.lombardi at 4science.it)
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*/
@Component
@AuthorizationFeatureDocumentation(name = ShowClaimItemFeature.NAME,
description = "Used to verify if the given user can request the claim of an item")
public class ShowClaimItemFeature implements AuthorizationFeature {
public static final String NAME = "showClaimItem";
private static final Logger LOG = LoggerFactory.getLogger(ShowClaimItemFeature.class);
private final ItemService itemService;
private final ResearcherProfileService researcherProfileService;
@Autowired
public ShowClaimItemFeature(ItemService itemService, ResearcherProfileService researcherProfileService) {
this.itemService = itemService;
this.researcherProfileService = researcherProfileService;
}
@Override
@SuppressWarnings("rawtypes")
public boolean isAuthorized(Context context, BaseObjectRest object) throws SQLException {
if (!(object instanceof ItemRest) || Objects.isNull(context.getCurrentUser())) {
return false;
}
String id = ((ItemRest) object).getId();
Item item = itemService.find(context, UUID.fromString(id));
return researcherProfileService.hasProfileType(item) && hasNotAlreadyAProfile(context);
}
private boolean hasNotAlreadyAProfile(Context context) {
try {
return researcherProfileService.findById(context, context.getCurrentUser().getID()) == null;
} catch (SQLException | AuthorizeException e) {
LOG.warn("Error while checking if eperson has a ResearcherProfileAssociated: {}", e.getMessage(), e);
return false;
}
}
@Override
public String[] getSupportedTypes() {
return new String[] {ItemRest.CATEGORY + "." + ItemRest.NAME};
}
}

View File

@@ -155,6 +155,7 @@ public class SubmissionFormConverter implements DSpaceConverter<DCInputSet, Subm
inputField.setInput(inputRest);
if (dcinput.isMetadataField()) {
inputField.setSelectableMetadata(selectableMetadata);
inputField.setTypeBind(dcinput.getTypeBindList());
}
if (dcinput.isRelationshipField()) {
selectableRelationship = getSelectableRelationships(dcinput);

View File

@@ -116,7 +116,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
HttpServletResponse.SC_METHOD_NOT_ALLOWED);
}
@ExceptionHandler( {UnprocessableEntityException.class})
@ExceptionHandler({ UnprocessableEntityException.class, ResourceAlreadyExistsException.class })
protected void handleUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response,
Exception ex) throws IOException {
//422 is not defined in HttpServletResponse. Its meaning is "Unprocessable Entity".
@@ -167,12 +167,6 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
HttpStatus.BAD_REQUEST.value());
}
@ExceptionHandler(ResourceAlreadyExistsException.class)
protected void resourceConflictException(HttpServletRequest request, HttpServletResponse response,
ResourceAlreadyExistsException ex) throws IOException {
sendErrorResponse(request, response, null, ex.getMessage(), HttpStatus.UNPROCESSABLE_ENTITY.value());
}
@ExceptionHandler(MissingParameterException.class)
protected void MissingParameterException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException {

View File

@@ -74,12 +74,12 @@ public class ResearcherProfileAutomaticClaim implements PostLoggedInAction {
@Override
public void loggedIn(Context context) {
EPerson currentUser = context.getCurrentUser();
if (currentUser == null) {
if (isBlank(researcherProfileService.getProfileType())) {
return;
}
if (isBlank(researcherProfileService.getProfileType())) {
EPerson currentUser = context.getCurrentUser();
if (currentUser == null) {
return;
}

View File

@@ -0,0 +1,49 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
/**
* The Access Status REST Resource.
*/
public class AccessStatusRest implements RestModel {
public static final String NAME = "accessStatus";
String status;
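// Calculated access status value, e.g. one of the DefaultAccessStatusHelper statuses
// (open access, embargo, restricted, metadata only, unknown)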
@Override
@JsonProperty(access = Access.READ_ONLY)
public String getType() {
return NAME;
}
@Override
@JsonIgnore
public String getTypePlural() {
return getType();
}
public AccessStatusRest() {
setStatus(null);
}
public AccessStatusRest(String status) {
setStatus(status);
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
}

View File

@@ -17,6 +17,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
@LinksRest(links = {
@LinkRest(
name = ItemRest.ACCESS_STATUS,
method = "getAccessStatus"
),
@LinkRest(
name = ItemRest.BUNDLES,
method = "getBundles"
@@ -51,6 +55,7 @@ public class ItemRest extends DSpaceObjectRest {
public static final String PLURAL_NAME = "items";
public static final String CATEGORY = RestAddressableModel.CORE;
public static final String ACCESS_STATUS = "accessStatus";
public static final String BUNDLES = "bundles";
public static final String MAPPED_COLLECTIONS = "mappedCollections";
public static final String OWNING_COLLECTION = "owningCollection";

View File

@@ -83,6 +83,11 @@ public class SubmissionFormFieldRest {
*/
private List<LanguageFormField> languageCodes;
/**
* The list of type bind values
*/
private List<String> typeBind;
/**
* Getter for {@link #selectableMetadata}
*
@@ -266,6 +271,14 @@ public class SubmissionFormFieldRest {
}
}
public List<String> getTypeBind() {
return typeBind;
}
public void setTypeBind(List<String> typeBind) {
this.typeBind = typeBind;
}
public SelectableRelationship getSelectableRelationship() {
return selectableRelationship;
}

View File

@@ -0,0 +1,31 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model.hateoas;
import com.fasterxml.jackson.annotation.JsonUnwrapped;
import org.dspace.app.rest.model.AccessStatusRest;
import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource;
/**
* Access Status Rest HAL Resource. The HAL Resource wraps the REST Resource
* adding support for the links and embedded resources
*/
@RelNameDSpaceResource(AccessStatusRest.NAME)
public class AccessStatusResource extends HALResource<AccessStatusRest> {
@JsonUnwrapped
private AccessStatusRest data;
public AccessStatusResource(AccessStatusRest entry) {
super(entry);
}
public AccessStatusRest getData() {
return data;
}
}

View File

@@ -0,0 +1,61 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.UUID;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import org.dspace.access.status.service.AccessStatusService;
import org.dspace.app.rest.model.AccessStatusRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
* Link repository for calculating the access status of an Item
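* Exposed as the item's "accessStatus" link, e.g. GET /api/core/items/{uuid}/accessStatus
* (see ItemRestRepositoryIT).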
*/
@Component(ItemRest.CATEGORY + "." + ItemRest.NAME + "." + ItemRest.ACCESS_STATUS)
public class ItemAccessStatusLinkRepository extends AbstractDSpaceRestRepository
implements LinkRestRepository {
@Autowired
ItemService itemService;
@Autowired
AccessStatusService accessStatusService;
@PreAuthorize("hasPermission(#itemId, 'ITEM', 'READ')")
public AccessStatusRest getAccessStatus(@Nullable HttpServletRequest request,
UUID itemId,
@Nullable Pageable optionalPageable,
Projection projection) {
try {
Context context = obtainContext();
Item item = itemService.find(context, itemId);
if (item == null) {
throw new ResourceNotFoundException("No such item: " + itemId);
}
AccessStatusRest accessStatusRest = new AccessStatusRest();
String accessStatus = accessStatusService.getAccessStatus(context, item);
accessStatusRest.setStatus(accessStatus);
return accessStatusRest;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}

View File

@@ -18,9 +18,9 @@ import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.Parameter;
@@ -296,9 +296,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
protected MetadataFieldRest put(Context context, HttpServletRequest request, String apiCategory, String model,
Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {
MetadataFieldRest metadataFieldRest = new Gson().fromJson(jsonNode.toString(), MetadataFieldRest.class);
MetadataFieldRest metadataFieldRest;
try {
metadataFieldRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataFieldRest.class);
} catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e);
}
if (isBlank(metadataFieldRest.getElement())) {
if (metadataFieldRest == null || isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
}

View File

@@ -15,9 +15,9 @@ import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.dspace.app.rest.exception.DSpaceBadRequestException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.MetadataSchemaRest;
@@ -138,9 +138,14 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
protected MetadataSchemaRest put(Context context, HttpServletRequest request, String apiCategory, String model,
Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {
MetadataSchemaRest metadataSchemaRest = new Gson().fromJson(jsonNode.toString(), MetadataSchemaRest.class);
MetadataSchemaRest metadataSchemaRest;
try {
metadataSchemaRest = new ObjectMapper().readValue(jsonNode.toString(), MetadataSchemaRest.class);
} catch (JsonProcessingException e) {
throw new UnprocessableEntityException("Cannot parse JSON in request body", e);
}
if (isBlank(metadataSchemaRest.getPrefix())) {
if (metadataSchemaRest == null || isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank");
}
if (isBlank(metadataSchemaRest.getNamespace())) {

View File

@@ -137,7 +137,11 @@ public class EPersonRestAuthenticationProvider implements AuthenticationProvider
output = createAuthentication(newContext);
for (PostLoggedInAction action : postLoggedInActions) {
try {
action.loggedIn(newContext);
} catch (Exception ex) {
log.error("An error occurs performing post logged in action", ex);
}
}
} else {

View File

@@ -306,7 +306,9 @@ public class SubmissionService {
result.setRights(creativeCommonsService.getLicenseName(item));
Bitstream licenseRdfBitstream = creativeCommonsService.getLicenseRdfBitstream(item);
if (licenseRdfBitstream != null) {
result.setFile(converter.toRest(licenseRdfBitstream, Projection.DEFAULT));
}
return result;
}

View File

@@ -31,6 +31,8 @@ import org.dspace.content.InProgressSubmission;
import org.dspace.content.MetadataValue;
import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Describe step for DSpace Spring Rest. Expose and allow patching of the in progress submission metadata. It is
@@ -43,7 +45,11 @@ public class DescribeStep extends AbstractProcessingStep {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(DescribeStep.class);
// Input reader for form configuration
private DCInputsReader inputReader;
// Configuration service
private final ConfigurationService configurationService =
DSpaceServicesFactory.getInstance().getConfigurationService();
public DescribeStep() throws DCInputsReaderException {
inputReader = new DCInputsReader();
@@ -64,8 +70,17 @@ public class DescribeStep extends AbstractProcessingStep {
private void readField(InProgressSubmission obj, SubmissionStepConfig config, DataDescribe data,
DCInputSet inputConfig) throws DCInputsReaderException {
String documentTypeValue = "";
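// Read the item's document type from the configured type-bind field (default dc.type);
// it is used below to decide which inputs are allowed for this item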
List<MetadataValue> documentType = itemService.getMetadataByMetadataString(obj.getItem(),
configurationService.getProperty("submit.type-bind.field", "dc.type"));
if (documentType.size() > 0) {
documentTypeValue = documentType.get(0).getValue();
}
for (DCInput[] row : inputConfig.getFields()) {
for (DCInput input : row) {
// Is this input allowed for the document type, as per type bind config? If there is no type
// bind set, this is always true
boolean allowed = input.isAllowedFor(documentTypeValue);
List<String> fieldsName = new ArrayList<String>();
if (input.isQualdropValue()) {
@@ -91,6 +106,9 @@ public class DescribeStep extends AbstractProcessingStep {
String[] metadataToCheck = Utils.tokenize(md.getMetadataField().toString());
if (data.getMetadata().containsKey(
Utils.standardize(metadataToCheck[0], metadataToCheck[1], metadataToCheck[2], "."))) {
// If field is allowed by type bind, add value to existing field set, otherwise remove
// all values for this field
if (allowed) {
data.getMetadata()
.get(Utils.standardize(md.getMetadataField().getMetadataSchema().getName(),
md.getMetadataField().getElement(),
@@ -98,6 +116,12 @@ public class DescribeStep extends AbstractProcessingStep {
"."))
.add(dto);
} else {
data.getMetadata().remove(Utils.standardize(metadataToCheck[0], metadataToCheck[1],
metadataToCheck[2], "."));
}
} else {
// Add values only if allowed by type bind
if (allowed) {
List<MetadataValueRest> listDto = new ArrayList<>();
listDto.add(dto);
data.getMetadata()
@@ -111,6 +135,7 @@ public class DescribeStep extends AbstractProcessingStep {
}
}
}
}
@Override
public void doPatchProcessing(Context context, HttpServletRequest currentRequest, InProgressSubmission source,

View File

@@ -16,6 +16,7 @@ import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.ErrorRest;
import org.dspace.app.rest.repository.WorkspaceItemRestRepository;
import org.dspace.app.rest.submit.SubmissionService;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.app.util.DCInput;
import org.dspace.app.util.DCInputSet;
import org.dspace.app.util.DCInputsReader;
@@ -25,6 +26,7 @@ import org.dspace.content.InProgressSubmission;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.service.MetadataAuthorityService;
import org.dspace.content.service.ItemService;
import org.dspace.services.ConfigurationService;
/**
* Execute three validation check on fields validation:
@@ -50,12 +52,20 @@ public class MetadataValidation extends AbstractValidation {
private MetadataAuthorityService metadataAuthorityService;
private ConfigurationService configurationService;
@Override
public List<ErrorRest> validate(SubmissionService submissionService, InProgressSubmission obj,
SubmissionStepConfig config) throws DCInputsReaderException, SQLException {
List<ErrorRest> errors = new ArrayList<>();
String documentTypeValue = "";
DCInputSet inputConfig = getInputReader().getInputsByFormName(config.getId());
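// Read the item's document type from the configured type-bind field (default dc.type)
// so the type-bind rules below can be evaluated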
List<MetadataValue> documentType = itemService.getMetadataByMetadataString(obj.getItem(),
configurationService.getProperty("submit.type-bind.field", "dc.type"));
if (documentType.size() > 0) {
documentTypeValue = documentType.get(0).getValue();
}
for (DCInput[] row : inputConfig.getFields()) {
for (DCInput input : row) {
String fieldKey =
@@ -71,12 +81,21 @@ public class MetadataValidation extends AbstractValidation {
for (int i = 1; i < inputPairs.size(); i += 2) {
String fullFieldname = input.getFieldName() + "." + (String) inputPairs.get(i);
List<MetadataValue> mdv = itemService.getMetadataByMetadataString(obj.getItem(), fullFieldname);
// If the input is not allowed for this type, strip it from item metadata.
if (!input.isAllowedFor(documentTypeValue)) {
itemService.removeMetadataValues(ContextUtil.obtainCurrentRequestContext(),
obj.getItem(), mdv);
} else {
validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors);
if (mdv.size() > 0 && input.isVisible(DCInput.SUBMISSION_SCOPE)) {
foundResult = true;
}
}
if (input.isRequired() && ! foundResult) {
}
// If the input is required but not allowed for this type (its values were just removed), don't throw
// an error - this way, a field can be required for the "Book" type it is bound to, but not for
// other types. A user may have switched between types before the final deposit.
if (input.isRequired() && !foundResult && input.isAllowedFor(documentTypeValue)) {
// for this required qualdrop no value was found, add to the list of error fields
addError(errors, ERROR_VALIDATION_REQUIRED,
"/" + WorkspaceItemRestRepository.OPERATION_PATH_SECTIONS + "/" + config.getId() + "/" +
@@ -89,6 +108,12 @@ public class MetadataValidation extends AbstractValidation {
for (String fieldName : fieldsName) {
List<MetadataValue> mdv = itemService.getMetadataByMetadataString(obj.getItem(), fieldName);
if (!input.isAllowedFor(documentTypeValue)) {
itemService.removeMetadataValues(ContextUtil.obtainCurrentRequestContext(), obj.getItem(), mdv);
// Continue here, this skips the required check since we've just removed values that previously
// appeared, and the configuration already indicates this field shouldn't be included
continue;
}
validateMetadataValues(mdv, input, config, isAuthorityControlled, fieldKey, errors);
if ((input.isRequired() && mdv.size() == 0) && input.isVisible(DCInput.SUBMISSION_SCOPE)) {
// since this field is missing add to list of error
@@ -124,6 +149,10 @@ public class MetadataValidation extends AbstractValidation {
}
}
public void setConfigurationService(ConfigurationService configurationService) {
this.configurationService = configurationService;
}
public void setItemService(ItemService itemService) {
this.itemService = itemService;
}

View File

@@ -16,6 +16,7 @@
<bean name="metadataValidation" class="org.dspace.app.rest.submit.step.validation.MetadataValidation"
scope="prototype">
<property name="name" value="submission-form"/>
<property name="configurationService" ref="org.dspace.services.ConfigurationService"/>
<property name="itemService" ref="org.dspace.content.ItemServiceImpl"/>
<property name="metadataAuthorityService" ref="org.dspace.content.authority.MetadataAuthorityServiceImpl"/>
</bean>

View File

@@ -10,7 +10,7 @@ package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
@@ -118,7 +118,11 @@ public class CCLicenseAddPatchOperationIT extends AbstractControllerIntegrationT
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.sections", not(hasJsonPath("cclicense"))));
.andExpect(jsonPath("$.sections.cclicense", allOf(
hasJsonPath("$.uri", nullValue()),
hasJsonPath("$.rights",nullValue()),
hasJsonPath("$.file", nullValue())
)));
}

View File

@@ -10,7 +10,7 @@ package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -95,7 +95,11 @@ public class CCLicenseRemovePatchOperationIT extends AbstractControllerIntegrati
.content(removePatch)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.sections", not(hasJsonPath("cclicense"))));
.andExpect(jsonPath("$.sections.cclicense", allOf(
hasJsonPath("$.uri", nullValue()),
hasJsonPath("$.rights",nullValue()),
hasJsonPath("$.file", nullValue())
)));
}

View File

@@ -0,0 +1,119 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.VocabularyMatcher.matchVocabularyEntry;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.sql.SQLException;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.builder.EPersonBuilder;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
* Integration tests for {@link EPersonAuthority}.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*
*/
public class EPersonAuthorityIT extends AbstractControllerIntegrationTest {
@Test
public void testEPersonAuthorityWithFirstName() throws Exception {
context.turnOffAuthorisationSystem();
String firstEPersonId = createEPerson("Luca", "Giamminonni");
String secondEPersonId = createEPerson("Andrea", "Bollini");
String thirdEPersonId = createEPerson("Luca", "Bollini");
context.restoreAuthSystemState();
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", "Luca"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId),
matchVocabularyEntry("Luca Bollini", "Luca Bollini", "vocabularyEntry", thirdEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(2)));
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", "Andrea"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(1)));
}
@Test
public void testEPersonAuthorityWithLastName() throws Exception {
context.turnOffAuthorisationSystem();
String firstEPersonId = createEPerson("Luca", "Giamminonni");
String secondEPersonId = createEPerson("Andrea", "Bollini");
String thirdEPersonId = createEPerson("Luca", "Bollini");
context.restoreAuthSystemState();
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", "Giamminonni"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(1)));
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", "Bollini"))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId),
matchVocabularyEntry("Luca Bollini", "Luca Bollini", "vocabularyEntry", thirdEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(2)));
}
@Test
public void testEPersonAuthorityWithId() throws Exception {
context.turnOffAuthorisationSystem();
String firstEPersonId = createEPerson("Luca", "Giamminonni");
String secondEPersonId = createEPerson("Andrea", "Bollini");
context.restoreAuthSystemState();
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", firstEPersonId))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Luca Giamminonni", "Luca Giamminonni", "vocabularyEntry", firstEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(1)));
getClient(token).perform(get("/api/submission/vocabularies/EPersonAuthority/entries")
.param("filter", secondEPersonId))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded.entries", containsInAnyOrder(
matchVocabularyEntry("Andrea Bollini", "Andrea Bollini", "vocabularyEntry", secondEPersonId))))
.andExpect(jsonPath("$.page.totalElements", Matchers.is(1)));
}
private String createEPerson(String firstName, String lastName) throws SQLException {
return EPersonBuilder.createEPerson(context)
.withNameInMetadata(firstName, lastName)
.build()
.getID()
.toString();
}
}

View File

@@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist;
import static org.dspace.core.Constants.WRITE;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
@@ -3861,6 +3862,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.andExpect(jsonPath("$.inArchive", Matchers.is(false)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/items/" + item.getID().toString())))
.andExpect(jsonPath("$._links.accessStatus.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus")))
.andExpect(jsonPath("$._links.bundles.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles")))
.andExpect(jsonPath("$._links.mappedCollections.href",
@@ -3893,6 +3896,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.andExpect(jsonPath("$.inArchive", Matchers.is(false)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/items/" + item.getID().toString())))
.andExpect(jsonPath("$._links.accessStatus.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus")))
.andExpect(jsonPath("$._links.bundles.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles")))
.andExpect(jsonPath("$._links.mappedCollections.href",
@@ -3926,6 +3931,8 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
Matchers.containsString("/api/core/items/" + item.getID().toString())))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/items/" + item.getID().toString())))
.andExpect(jsonPath("$._links.accessStatus.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/accessStatus")))
.andExpect(jsonPath("$._links.bundles.href",
Matchers.containsString("/api/core/items/" + item.getID().toString() + "/bundles")))
.andExpect(jsonPath("$._links.mappedCollections.href",
@@ -4376,4 +4383,35 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
.andExpect(status().isUnauthorized());
}
@Test
public void findAccessStatusForItemBadRequestTest() throws Exception {
getClient().perform(get("/api/core/items/{uuid}/accessStatus", "1"))
.andExpect(status().isBadRequest());
}
@Test
public void findAccessStatusForItemNotFoundTest() throws Exception {
UUID fakeUUID = UUID.randomUUID();
getClient().perform(get("/api/core/items/{uuid}/accessStatus", fakeUUID))
.andExpect(status().isNotFound());
}
@Test
public void findAccessStatusForItemTest() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection owningCollection = CollectionBuilder.createCollection(context, parentCommunity)
.withName("Owning Collection")
.build();
Item item = ItemBuilder.createItem(context, owningCollection)
.withTitle("Test item")
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/items/{uuid}/accessStatus", item.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.status", notNullValue()));
}
}

View File

@@ -28,6 +28,7 @@ import static org.springframework.test.web.servlet.request.MockMvcRequestBuilder
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -35,7 +36,6 @@ import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.JsonObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
@@ -566,13 +566,14 @@ public class RelationshipRestRepositoryIT extends AbstractEntityIntegrationTest
.andExpect(jsonPath("$.leftwardValue", is(nullValue())))
.andExpect(jsonPath("$.rightwardValue", is(nullValue())));
JsonObject contentObj = new JsonObject();
contentObj.addProperty("leftwardValue", leftwardValue);
Map<String, String> map = new HashMap<>();
map.put("leftwardValue", leftwardValue);
String json = new ObjectMapper().writeValueAsString(map);
// Add leftwardValue
getClient(token).perform(put("/api/core/relationships/" + idRef)
.contentType("application/json")
.content(contentObj.toString()))
.content(json))
.andExpect(status().isOk());
// Verify leftwardValue is present and rightwardValue not
@@ -624,14 +625,15 @@ public class RelationshipRestRepositoryIT extends AbstractEntityIntegrationTest
.andExpect(jsonPath("$.leftwardValue", is(nullValue())))
.andExpect(jsonPath("$.rightwardValue", is(nullValue())));
JsonObject contentObj = new JsonObject();
contentObj.addProperty("leftwardValue", leftwardValue);
contentObj.addProperty("rightwardValue", rightwardValue);
Map<String, String> map = new HashMap<>();
map.put("leftwardValue", leftwardValue);
map.put("rightwardValue", rightwardValue);
String json = new ObjectMapper().writeValueAsString(map);
// Add leftwardValue and rightwardValue
getClient(token).perform(put("/api/core/relationships/" + idRef)
.contentType("application/json")
.content(contentObj.toString()))
.content(json))
.andExpect(status().isOk());
// Verify leftwardValue and rightwardValue are present

View File

@@ -275,7 +275,7 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
@Test
public void testCreateAndReturnWithPublicProfile() throws Exception {
configurationService.setProperty("researcher-profile.set-new-profile-private", false);
configurationService.setProperty("researcher-profile.set-new-profile-visible", true);
String id = user.getID().toString();
String authToken = getAuthToken(user.getEmail(), password);
@@ -338,6 +338,30 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
.andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson")));
}
@Test
public void testCreateAndReturnWithoutCollectionIdSet() throws Exception {
String id = user.getID().toString();
configurationService.setProperty("researcher-profile.collection.uuid", null);
String authToken = getAuthToken(user.getEmail(), password);
getClient(authToken).perform(post("/api/eperson/profiles/")
.contentType(MediaType.APPLICATION_JSON_VALUE))
.andExpect(status().isCreated())
.andExpect(jsonPath("$.id", is(id)))
.andExpect(jsonPath("$.visible", is(false)))
.andExpect(jsonPath("$.type", is("profile")))
.andExpect(jsonPath("$", matchLinks("http://localhost/api/eperson/profiles/" + id, "item", "eperson")));
String itemId = getItemIdByProfileId(authToken, id);
Item profileItem = itemService.find(context, UUIDUtils.fromString(itemId));
assertThat(profileItem, notNullValue());
assertThat(profileItem.getOwningCollection(), is(personCollection));
}
/**
* Verify that a standard user can't call the createAndReturn endpoint to store
* a new researcher profile related to another user.
@@ -587,6 +611,11 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
.andExpect(status().isOk())
.andExpect(jsonPath("$.visible", is(false)));
String itemId = getItemIdByProfileId(authToken, id);
getClient().perform(get("/api/core/items/{id}", itemId))
.andExpect(status().isUnauthorized());
// change the visibility to true
List<Operation> operations = asList(new ReplaceOperation("/visible", true));
@@ -600,6 +629,9 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
.andExpect(status().isOk())
.andExpect(jsonPath("$.visible", is(true)));
getClient().perform(get("/api/core/items/{id}", itemId))
.andExpect(status().isOk());
// change the visibility to false
operations = asList(new ReplaceOperation("/visible", false));
@@ -613,6 +645,9 @@ public class ResearcherProfileRestRepositoryIT extends AbstractControllerIntegra
.andExpect(status().isOk())
.andExpect(jsonPath("$.visible", is(false)));
getClient().perform(get("/api/core/items/{id}", itemId))
.andExpect(status().isUnauthorized());
}
/**

View File

@@ -30,7 +30,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import com.google.gson.Gson;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.collections4.CollectionUtils;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.BitstreamMatcher;
@@ -277,7 +277,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
try {
getClient(token)
.perform(multipart("/api/system/scripts/mock-script/processes")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("mock-script",
@@ -321,7 +321,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
try {
getClient(token)
.perform(multipart("/api/system/scripts/mock-script/processes")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("mock-script",
@@ -358,7 +358,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
try {
getClient(token)
.perform(multipart("/api/system/scripts/mock-script/processes")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("mock-script",
@@ -466,7 +466,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest {
.perform(multipart("/api/system/scripts/mock-script/processes")
.file(bitstreamFile)
.characterEncoding("UTF-8")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("mock-script",

View File

@@ -11,6 +11,7 @@ import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
@@ -113,20 +114,20 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone")))
// check the first two rows
.andExpect(jsonPath("$.rows[0].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author",
SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", null,
null, true,"Add an author", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title",
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", null,
"You must enter a main title for this item.", false,
"Enter the main title of the item.", "dc.title"))))
// check a row with multiple fields
.andExpect(jsonPath("$.rows[3].fields",
contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue",
"You must enter at least the year.", false,
null, "You must enter at least the year.", false,
"Please give the date", "col-sm-4",
"dc.date.issued"),
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher",
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", null,
null, false,"Enter the name of",
"col-sm-8","dc.publisher"))))
;
@@ -144,18 +145,18 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone")))
.andExpect(jsonPath("$.rows[0].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author",
SubmissionFormFieldMatcher.matchFormFieldDefinition("name", "Author", null,
null, true,"Add an author", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title",
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Title", null,
"You must enter a main title for this item.", false,
"Enter the main title of the item.", "dc.title"))))
.andExpect(jsonPath("$.rows[3].fields",contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue",
SubmissionFormFieldMatcher.matchFormFieldDefinition("date", "Date of Issue", null,
"You must enter at least the year.", false,
"Please give the date", "col-sm-4",
"dc.date.issued"),
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher",
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Publisher", null,
null, false,"Enter the name of",
"col-sm-8","dc.publisher"))));
}
@@ -220,20 +221,20 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
// dc.subject fields with in separate rows all linked to an authority with different
// presentation modes (suggestion, name-lookup, lookup)
.andExpect(jsonPath("$.rows[0].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Author",
SubmissionFormFieldMatcher.matchFormFieldDefinition("onebox", "Author", null,
null, true,
"Author field that can be associated with an authority providing suggestion",
null, "dc.contributor.author", "SolrAuthorAuthority")
)))
.andExpect(jsonPath("$.rows[1].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup-name", "Editor",
SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup-name", "Editor", null,
null, false,
"Editor field that can be associated with an authority "
+ "providing the special name lookup",
null, "dc.contributor.editor", "SolrEditorAuthority")
)))
.andExpect(jsonPath("$.rows[2].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup", "Subject",
SubmissionFormFieldMatcher.matchFormFieldDefinition("lookup", "Subject", null,
null, true,
"Subject field that can be associated with an authority providing lookup",
null, "dc.subject", "SolrSubjectAuthority")
@@ -266,7 +267,7 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone")))
// our test configuration include the dc.type field with a value pair in the 8th row
.andExpect(jsonPath("$.rows[7].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("dropdown", "Type",
SubmissionFormFieldMatcher.matchFormFieldDefinition("dropdown", "Type", null,
null, true,
"Select the type(s) of content of the item. To select more than one value in the " +
"list, you may have to hold down the \"CTRL\" or \"Shift\" key.",
@@ -275,6 +276,35 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
;
}
@Test
public void findFieldWithTypeBindConfig() throws Exception {
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/config/submissionforms/traditionalpageone"))
// The status has to be 200 OK
.andExpect(status().isOk())
// We expect the content type to be "application/hal+json;charset=UTF-8"
.andExpect(content().contentType(contentType))
// Check that the JSON root matches the expected "traditionalpageone" input forms
.andExpect(jsonPath("$.id", is("traditionalpageone")))
.andExpect(jsonPath("$.name", is("traditionalpageone")))
.andExpect(jsonPath("$.type", is("submissionform")))
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/traditionalpageone")))
// check a row with type-bind 'Technical Report'
.andExpect(jsonPath("$.rows[5].fields", contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("series", "Series/Report No.",
"Technical Report", null, true,
"Enter the series and number assigned to this item by your community.",
"dc.relation.ispartofseries"))))
// check the same row with a NON-matching type-bind 'Article' (expect false)
.andExpect(((jsonPath("$.rows[5].fields", not(contains(
SubmissionFormFieldMatcher.matchFormFieldDefinition("series", "Series/Report No.",
"Article", null, true,
"Enter the series and number assigned to this item by your community.",
"dc.relation.ispartofseries")))))));
}
@Test
public void findOpenRelationshipConfig() throws Exception {
String token = getAuthToken(admin.getEmail(), password);
@@ -352,14 +382,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true,
.matchFormFieldDefinition("name", "Autore", null,
"\u00C8" + " richiesto almeno un autore", true,
"Aggiungi un autore", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Titolo",
.matchFormFieldDefinition("onebox", "Titolo", null,
"\u00C8" + " necessario inserire un titolo principale per questo item", false,
"Inserisci titolo principale di questo item", "dc.title"))))
.andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("dropdown", "Lingua", null, false,
.matchFormFieldDefinition("dropdown", "Lingua", null, null, false,
"Selezionare la lingua del contenuto principale dell'item."
+ " Se la lingua non compare nell'elenco, selezionare (Altro)."
+ " Se il contenuto non ha davvero una lingua"
@@ -376,14 +407,14 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Автор", "Потрібно ввести хочаб одного автора!",
.matchFormFieldDefinition("name", "Автор", null, "Потрібно ввести хочаб одного автора!",
true, "Додати автора", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Заголовок",
.matchFormFieldDefinition("onebox", "Заголовок", null,
"Заговолок файла обов'язковий !", false,
"Ввести основний заголовок файла", "dc.title"))))
.andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("dropdown", "Мова", null, false,
.matchFormFieldDefinition("dropdown", "Мова", null, null, false,
"Виберiть мову головного змiсту файлу, як що мови немає у списку, вибрати (Iнша)."
+ " Як що вмiст вайлу не є текстовим, наприклад є фотографiєю, тодi вибрати (N/A)",
null, "dc.language.iso", "common_iso_languages"))));
@@ -431,14 +462,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true,
.matchFormFieldDefinition("name", "Autore", null,
"\u00C8" + " richiesto almeno un autore", true,
"Aggiungi un autore", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Titolo",
.matchFormFieldDefinition("onebox", "Titolo", null,
"\u00C8" + " necessario inserire un titolo principale per questo item", false,
"Inserisci titolo principale di questo item", "dc.title"))))
.andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("dropdown", "Lingua", null, false,
.matchFormFieldDefinition("dropdown", "Lingua", null, null, false,
"Selezionare la lingua del contenuto principale dell'item."
+ " Se la lingua non compare nell'elenco, selezionare (Altro)."
+ " Se il contenuto non ha davvero una lingua"
@@ -455,14 +487,14 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Автор", "Потрібно ввести хочаб одного автора!",
.matchFormFieldDefinition("name", "Автор", null, "Потрібно ввести хочаб одного автора!",
true, "Додати автора", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Заголовок",
.matchFormFieldDefinition("onebox", "Заголовок", null,
"Заговолок файла обов'язковий !", false,
"Ввести основний заголовок файла", "dc.title"))))
.andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("dropdown", "Мова", null, false,
.matchFormFieldDefinition("dropdown", "Мова", null, null, false,
"Виберiть мову головного змiсту файлу, як що мови немає у списку, вибрати (Iнша)."
+ " Як що вмiст вайлу не є текстовим, наприклад є фотографiєю, тодi вибрати (N/A)",
null, "dc.language.iso", "common_iso_languages"))));
@@ -505,14 +537,15 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Autore", "\u00C8" + " richiesto almeno un autore", true,
.matchFormFieldDefinition("name", "Autore", null,
"\u00C8" + " richiesto almeno un autore", true,
"Aggiungi un autore", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Titolo",
.matchFormFieldDefinition("onebox", "Titolo", null,
"\u00C8" + " necessario inserire un titolo principale per questo item", false,
"Inserisci titolo principale di questo item", "dc.title"))))
.andExpect(jsonPath("$.rows[2].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("dropdown", "Lingua", null, false,
.matchFormFieldDefinition("dropdown", "Lingua", null, null, false,
"Selezionare la lingua del contenuto principale dell'item."
+ " Se la lingua non compare nell'elenco, selezionare (Altro)."
+ " Se il contenuto non ha davvero una lingua"
@@ -547,10 +580,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Autore", "\u00C8 richiesto almeno un autore", true,
.matchFormFieldDefinition("name", "Autore", null, "\u00C8 richiesto almeno un autore", true,
"Aggiungi un autore", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Titolo",
.matchFormFieldDefinition("onebox", "Titolo", null,
"\u00C8 necessario inserire un titolo principale per questo item", false,
"Inserisci titolo principale di questo item", "dc.title"))));
resetLocalesConfiguration();
@@ -582,10 +615,10 @@ public class SubmissionFormsControllerIT extends AbstractControllerIntegrationTe
.andExpect(jsonPath("$._links.self.href", Matchers
.startsWith(REST_SERVER_URL + "config/submissionforms/languagetest")))
.andExpect(jsonPath("$.rows[0].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("name", "Autore", "\u00C8 richiesto almeno un autore", true,
.matchFormFieldDefinition("name", "Autore", null, "\u00C8 richiesto almeno un autore", true,
"Aggiungi un autore", "dc.contributor.author"))))
.andExpect(jsonPath("$.rows[1].fields", contains(SubmissionFormFieldMatcher
.matchFormFieldDefinition("onebox", "Titolo",
.matchFormFieldDefinition("onebox", "Titolo", null,
"\u00C8 necessario inserire un titolo principale per questo item", false,
"Inserisci titolo principale di questo item", "dc.title"))));

View File

@@ -1935,6 +1935,141 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
;
}
/**
 * Test the update of metadata for fields configured with type-bind
 *
 * @throws Exception
 */
@Test
public void patchUpdateMetadataWithBindTest() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and two collections.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
String authToken = getAuthToken(eperson.getEmail(), password);
WorkspaceItem witem = WorkspaceItemBuilder.createWorkspaceItem(context, col1)
.withTitle("Workspace Item 1")
.withIssueDate("2017-10-17")
.withSubject("ExtraEntry")
.grantLicense()
.build();
//disable file upload mandatory
configurationService.setProperty("webui.submit.upload.required", false);
context.restoreAuthSystemState();
// Try to add isPartOfSeries (type bound to technical report) - this should not work and instead we'll get
// no JSON path for that field
List<Operation> updateSeries = new ArrayList<Operation>();
List<Map<String, String>> seriesValues = new ArrayList<>();
Map<String, String> value = new HashMap<String, String>();
value.put("value", "New Series");
seriesValues.add(value);
updateSeries.add(new AddOperation("/sections/traditionalpageone/dc.relation.ispartofseries", seriesValues));
String patchBody = getPatchContent(updateSeries);
getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID())
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should match an item with no series or type
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, null, null))));
// Verify that the metadata isn't in the workspace item
getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should match an item with no series or type
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, null, null))));
// Set the type to Technical Report confirm it worked
List<Operation> updateType = new ArrayList<>();
List<Map<String, String>> typeValues = new ArrayList<>();
value = new HashMap<String, String>();
value.put("value", "Technical Report");
typeValues.add(value);
updateType.add(new AddOperation("/sections/traditionalpageone/dc.type", typeValues));
patchBody = getPatchContent(updateType);
getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID())
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should now match an item with the expected type and series
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Technical Report",
null))));
getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Technical Report",
null))));
// Another test, this time adding the series value should be successful and we'll see the value
patchBody = getPatchContent(updateSeries);
getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID())
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should match an item with the expected series and type
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem,
"Technical Report", "New Series"))));
// Verify that the metadata is now present in the workspace item
getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should match an item with the expected series and type
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem,
"Technical Report", "New Series"))));
// One final update, to a different type; this should lose the series as we're back to a non-matching type
updateType = new ArrayList<>();
typeValues = new ArrayList<>();
value = new HashMap<String, String>();
value.put("value", "Article");
typeValues.add(value);
updateType.add(new AddOperation("/sections/traditionalpageone/dc.type", typeValues));
patchBody = getPatchContent(updateType);
getClient(authToken).perform(patch("/api/submission/workspaceitems/" + witem.getID())
.content(patchBody)
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
// Check this - we should NOT match an item with the series "New Series"
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Article",
null))));
getClient(authToken).perform(get("/api/submission/workspaceitems/" + witem.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$.errors").doesNotExist())
.andExpect(jsonPath("$",
Matchers.is(WorkspaceItemMatcher.matchItemWithTypeAndSeries(witem, "Article",
null))));
}
@Test
public void patchUpdateMetadataForbiddenTest() throws Exception {
context.turnOffAuthorisationSystem();

View File

@@ -1,177 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.authorization;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.dspace.app.rest.authorization.impl.ShowClaimItemFeature;
import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.projection.Projection;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.utils.Utils;
import org.dspace.builder.CollectionBuilder;
import org.dspace.builder.CommunityBuilder;
import org.dspace.builder.ItemBuilder;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Test of Show Claim Item Authorization Feature implementation.
*
* @author Corrado Lombardi (corrado.lombardi at 4science.it)
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*/
public class ShowClaimItemFeatureIT extends AbstractControllerIntegrationTest {
private Item collectionAProfile;
private Item collectionBProfile;
@Autowired
private ItemConverter itemConverter;
@Autowired
private Utils utils;
@Autowired
private AuthorizationFeatureService authorizationFeatureService;
private AuthorizationFeature showClaimProfileFeature;
private Collection personCollection;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build();
personCollection =
CollectionBuilder.createCollection(context, parentCommunity).withEntityType("Person")
.withName("claimableA").build();
final Collection claimableCollectionB =
CollectionBuilder.createCollection(context, parentCommunity).withEntityType("Person")
.withName("claimableB").build();
collectionAProfile = ItemBuilder.createItem(context, personCollection).build();
collectionBProfile = ItemBuilder.createItem(context, claimableCollectionB).build();
context.restoreAuthSystemState();
showClaimProfileFeature = authorizationFeatureService.find(ShowClaimItemFeature.NAME);
}
@Test
public void testCanClaimAProfile() throws Exception {
String token = getAuthToken(context.getCurrentUser().getEmail(), password);
getClient(token).perform(get("/api/authz/authorizations/search/object")
.param("uri", uri(collectionAProfile))
.param("eperson", context.getCurrentUser().getID().toString())
.param("feature", showClaimProfileFeature.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded").exists())
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
getClient(token).perform(get("/api/authz/authorizations/search/object")
.param("uri", uri(collectionBProfile))
.param("eperson", context.getCurrentUser().getID().toString())
.param("feature", showClaimProfileFeature.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded").exists())
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
}
@Test
public void testNotClaimableEntity() throws Exception {
context.turnOffAuthorisationSystem();
Collection publicationCollection = CollectionBuilder
.createCollection(context, parentCommunity)
.withEntityType("Publication")
.withName("notClaimable")
.build();
context.turnOffAuthorisationSystem();
Item publication = ItemBuilder.createItem(context, publicationCollection).build();
String token = getAuthToken(context.getCurrentUser().getEmail(), password);
getClient(token).perform(get("/api/authz/authorizations/search/object")
.param("uri", uri(publication))
.param("eperson", context.getCurrentUser().getID().toString())
.param("feature", showClaimProfileFeature.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded").doesNotExist())
.andExpect(jsonPath("$.page.totalElements", equalTo(0)));
}
@Test
public void testItemAlreadyInARelation() throws Exception {
context.turnOffAuthorisationSystem();
Item ownedItem = ItemBuilder.createItem(context, personCollection)
.withDspaceObjectOwner("owner", "ownerAuthority").build();
context.restoreAuthSystemState();
String token = getAuthToken(context.getCurrentUser().getEmail(), password);
getClient(token).perform(get("/api/authz/authorizations/search/object")
.param("uri", uri(ownedItem))
.param("eperson", context.getCurrentUser().getID().toString())
.param("feature", showClaimProfileFeature.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded").exists())
.andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1)));
}
@Test
public void testUserWithProfile() throws Exception {
context.turnOffAuthorisationSystem();
ItemBuilder.createItem(context, personCollection)
.withTitle("User")
.withDspaceObjectOwner("User", context.getCurrentUser().getID().toString())
.build();
context.restoreAuthSystemState();
getClient(getAuthToken(context.getCurrentUser().getEmail(), password))
.perform(get("/api/authz/authorizations/search/object")
.param("uri", uri(collectionAProfile))
.param("eperson", context.getCurrentUser().getID().toString())
.param("feature", showClaimProfileFeature.getName()))
.andExpect(status().isOk())
.andExpect(jsonPath("$._embedded").doesNotExist())
.andExpect(jsonPath("$.page.totalElements", equalTo(0)));
}
private String uri(Item item) {
ItemRest itemRest = itemConverter.convert(item, Projection.DEFAULT);
String itemRestURI = utils.linkToSingleResource(itemRest, "self").getHref();
return itemRestURI;
}
}

View File

@@ -18,7 +18,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import com.google.gson.Gson;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.ProcessMatcher;
import org.dspace.app.rest.model.ParameterValueRest;
@@ -77,7 +77,7 @@ public class CsvExportIT extends AbstractControllerIntegrationTest {
getClient(token)
.perform(multipart("/api/system/scripts/metadata-export/processes")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("metadata-export",
@@ -128,7 +128,7 @@ public class CsvExportIT extends AbstractControllerIntegrationTest {
getClient(token)
.perform(multipart("/api/system/scripts/metadata-export/processes")
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("metadata-export",

View File

@@ -30,7 +30,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import com.google.gson.Gson;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.ProcessMatcher;
import org.dspace.app.rest.matcher.RelationshipMatcher;
@@ -285,8 +285,7 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
getClient(token)
.perform(multipart("/api/system/scripts/metadata-import/processes").file(bitstreamFile)
.param("properties",
new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andDo(result -> idRef
.set(read(result.getResponse().getContentAsString(), "$.processId")));
@@ -345,8 +344,7 @@ public class CsvImportIT extends AbstractEntityIntegrationTest {
getClient(token)
.perform(multipart("/api/system/scripts/metadata-import/processes").file(bitstreamFile)
.param("properties",
new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("metadata-import",

View File

@@ -50,6 +50,7 @@ public class ItemMatcher {
*/
public static Matcher<? super Object> matchFullEmbeds() {
return matchEmbeds(
"accessStatus",
"bundles[]",
"mappedCollections[]",
"owningCollection",
@@ -65,6 +66,7 @@ public class ItemMatcher {
*/
public static Matcher<? super Object> matchLinks(UUID uuid) {
return HalMatcher.matchLinks(REST_SERVER_URL + "core/items/" + uuid,
"accessStatus",
"bundles",
"mappedCollections",
"owningCollection",

View File

@@ -10,6 +10,7 @@ package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasNoJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
@@ -28,13 +29,15 @@ public class SubmissionFormFieldMatcher {
/**
* Shortcut for the
* {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, boolean, String, String, String, String)}
* {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, String, boolean, String, String, String, String)}
* with a null style and vocabulary name
*
* @param type
* the expected input type
* @param label
* the expected label
* @param typeBind
* the expected type-bind field(s)
* @param mandatoryMessage
* the expected mandatoryMessage, can be null. If not empty the field is expected to be flagged as
* mandatory
@@ -46,21 +49,23 @@ public class SubmissionFormFieldMatcher {
* the expected metadata
* @return a Matcher for all the condition above
*/
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String mandatoryMessage,
boolean repeatable,
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String typeBind,
String mandatoryMessage, boolean repeatable,
String hints, String metadata) {
return matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, null, metadata);
return matchFormFieldDefinition(type, label, typeBind, mandatoryMessage, repeatable, hints, null, metadata);
}
/**
* Shortcut for the
* {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, boolean, String, String, String, String)}
* {@link SubmissionFormFieldMatcher#matchFormFieldDefinition(String, String, String, String, boolean, String, String, String, String)}
* with a null controlled vocabulary
*
* @param type
* the expected input type
* @param label
* the expected label
* @param typeBind
* the expected type-bind field(s)
* @param mandatoryMessage
* the expected mandatoryMessage, can be null. If not empty the field is expected to be flagged as
* mandatory
@@ -75,10 +80,10 @@ public class SubmissionFormFieldMatcher {
* the expected metadata
* @return a Matcher for all the condition above
*/
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String mandatoryMessage,
boolean repeatable,
String hints, String style, String metadata) {
return matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, style, metadata, null);
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String typeBind,
String mandatoryMessage, boolean repeatable, String hints, String style, String metadata) {
return matchFormFieldDefinition(type, label, typeBind, mandatoryMessage, repeatable, hints, style, metadata,
null);
}
/**
@@ -88,6 +93,8 @@ public class SubmissionFormFieldMatcher {
* the expected input type
* @param label
* the expected label
* @param typeBind
* the expected type-bind field(s)
* @param mandatoryMessage
* the expected mandatoryMessage, can be null. If not empty the field is expected to be flagged as
* mandatory
@@ -100,18 +107,20 @@ public class SubmissionFormFieldMatcher {
* missing
* @param metadata
* the expected metadata
* @param controlled vocabulary
* @param controlledVocabulary
* the expected controlled vocabulary, can be null. If null the corresponding json path is expected to be
* missing
* @return a Matcher for all the condition above
*/
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String mandatoryMessage,
boolean repeatable, String hints, String style,
String metadata, String controlledVocabulary) {
public static Matcher<? super Object> matchFormFieldDefinition(String type, String label, String typeBind,
String mandatoryMessage, boolean repeatable,
String hints, String style, String metadata,
String controlledVocabulary) {
return allOf(
// check each field definition
hasJsonPath("$.input.type", is(type)),
hasJsonPath("$.label", containsString(label)),
typeBind != null ? hasJsonPath("$.typeBind", contains(typeBind)) : hasNoJsonPath("$.typeBind[0]"),
hasJsonPath("$.selectableMetadata[0].metadata", is(metadata)),
controlledVocabulary != null ? hasJsonPath("$.selectableMetadata[0].controlledVocabulary",
is(controlledVocabulary)) : hasNoJsonPath("$.selectableMetadata[0].controlledVocabulary"),
@@ -166,7 +175,7 @@ public class SubmissionFormFieldMatcher {
hasJsonPath("$.selectableRelationship.filter", is(filter)),
hasJsonPath("$.selectableRelationship.searchConfiguration", is(searchConfiguration)),
hasJsonPath("$.selectableRelationship.nameVariants", is(String.valueOf(nameVariants))),
matchFormFieldDefinition(type, label, mandatoryMessage, repeatable, hints, metadata));
matchFormFieldDefinition(type, label, null, mandatoryMessage, repeatable, hints, metadata));
}
/**

View File

@@ -41,4 +41,14 @@ public class VocabularyMatcher {
hasJsonPath("$.type", is(type))
);
}
public static Matcher<? super Object> matchVocabularyEntry(String display, String value, String type,
String authority) {
return allOf(
hasJsonPath("$.display", is(display)),
hasJsonPath("$.value", is(value)),
hasJsonPath("$.type", is(type)),
hasJsonPath("$.authority", is(authority))
);
}
}

View File

@@ -82,6 +82,30 @@ public class WorkspaceItemMatcher {
matchLinks(witem));
}
/**
* Check that the workspace item has the expected type and series values
* (used in type bind evaluation)
* @param witem the workspace item
* @param type the dc.type value eg. Technical Report
* @param series the series value eg. 11-23
* @return Matcher result
*/
public static Matcher matchItemWithTypeAndSeries(WorkspaceItem witem, String type, String series) {
return allOf(
// Check workspaceitem properties
matchProperties(witem),
// Check type appears or is null
type != null ?
hasJsonPath("$.sections.traditionalpageone['dc.type'][0].value", is(type)) :
hasNoJsonPath("$.sections.traditionalpageone['dc.type'][0].value"),
// Check series as it appears (for type bind testing)
series != null ?
hasJsonPath("$.sections.traditionalpageone['dc.relation.ispartofseries'][0].value", is(series)) :
hasNoJsonPath("$.sections.traditionalpageone['dc.relation.ispartofseries'][0].value"),
matchLinks(witem)
);
}
/**
* Check that the id and type are exposed
*

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import org.dspace.access.status.DefaultAccessStatusHelper;
import org.junit.Before;
import org.junit.Test;
/**
* Test the AccessStatusRestTest class
*/
public class AccessStatusRestTest {
AccessStatusRest accessStatusRest;
@Before
public void setUp() throws Exception {
accessStatusRest = new AccessStatusRest();
}
@Test
public void testAccessStatusIsNullBeforeStatusSet() throws Exception {
assertNull(accessStatusRest.getStatus());
}
@Test
public void testAccessStatusIsNotNullAfterStatusSet() throws Exception {
accessStatusRest.setStatus(DefaultAccessStatusHelper.UNKNOWN);
assertNotNull(accessStatusRest.getStatus());
}
}

View File

@@ -19,7 +19,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import com.google.gson.Gson;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.converter.DSpaceRunnableParameterConverter;
import org.dspace.app.rest.matcher.ProcessMatcher;
import org.dspace.app.rest.model.ParameterValueRest;
@@ -88,7 +88,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request with -t <invalidTaskOption>
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -109,7 +109,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request with missing required -i <handle>
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -131,7 +131,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request with missing required -i <handle>
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -171,7 +171,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request without -t <task> or -T <taskFile> (and no -q <queue>)
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -193,7 +193,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request with invalid -s <scope>; must be object, curation or open
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -215,7 +215,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
// Request with invalid -s <scope>; must be object, curation or open
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
// Illegal Argument Exception
.andExpect(status().isBadRequest());
}
@@ -257,7 +257,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
try {
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("curate",
@@ -308,7 +308,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
try {
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(status().isAccepted())
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("curate",
@@ -359,7 +359,7 @@ public class CurationScriptIT extends AbstractControllerIntegrationTest {
getClient(token)
.perform(multipart(CURATE_SCRIPT_ENDPOINT)
.param("properties", new Gson().toJson(list)))
.param("properties", new ObjectMapper().writeValueAsString(list)))
.andExpect(jsonPath("$", is(
ProcessMatcher.matchProcess("curate",
String.valueOf(admin.getID()), parameters,

View File

@@ -61,10 +61,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
<exclusion>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -127,7 +123,7 @@
<dependency>
<groupId>xom</groupId>
<artifactId>xom</artifactId>
<version>1.2.5</version>
<version>1.3.7</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>

View File

@@ -806,6 +806,22 @@ plugin.single.org.dspace.embargo.EmbargoSetter = org.dspace.embargo.DefaultEmbar
# implementation of embargo lifter plugin - - replace with local implementation if applicable
plugin.single.org.dspace.embargo.EmbargoLifter = org.dspace.embargo.DefaultEmbargoLifter
# values for the forever embargo date threshold
# This threshold date is used in the default access status helper to determine whether an item is
# restricted or embargoed, based on the start date of the primary (or first) file policies.
# If the policy start date is earlier than the threshold date, the status will be embargo;
# otherwise it will be restricted.
# You might want to change this threshold based on your needs. For example, some databases
# don't accept a date later than 31 December 9999.
access.status.embargo.forever.year = 10000
access.status.embargo.forever.month = 1
access.status.embargo.forever.day = 1
# implementation of access status helper plugin - replace with local implementation if applicable
# This default access status helper provides an item status based on the policies of the primary
# bitstream (or first bitstream in the original bundles if no primary file is specified).
plugin.single.org.dspace.access.status.AccessStatusHelper = org.dspace.access.status.DefaultAccessStatusHelper
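For illustration, a minimal sketch of the comparison this threshold drives (the class, method and constant names below are invented for this example and are not the DefaultAccessStatusHelper implementation):

import java.util.Date;
import org.joda.time.LocalDate;

public class EmbargoThresholdSketch {
    // Hypothetical placeholders mirroring the status names used in the comments above.
    public static final String EMBARGO = "embargo";
    public static final String RESTRICTED = "restricted";

    /**
     * Decide between "embargo" and "restricted" for a policy that is not yet in force.
     *
     * @param startDate the start date of the primary (or first) bitstream policy
     * @param forever   the threshold built from access.status.embargo.forever.{year,month,day}
     * @return EMBARGO if the policy starts before the threshold, RESTRICTED otherwise
     */
    public static String statusForFuturePolicy(Date startDate, Date forever) {
        return startDate.before(forever) ? EMBARGO : RESTRICTED;
    }

    public static void main(String[] args) {
        // Threshold built from the configured year/month/day defaults above (10000-01-01).
        Date forever = new LocalDate(10000, 1, 1).toDate();
        Date embargoedUntil2030 = new LocalDate(2030, 1, 1).toDate();
        System.out.println(statusForFuturePolicy(embargoedUntil2030, forever)); // embargo
    }
}

With the defaults above the threshold is 10000-01-01, so only policies whose start date is set to that sentinel (or later) are reported as restricted; any earlier start date is treated as an embargo.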
#### Checksum Checker Settings ####
# Default dispatcher in case none specified
plugin.single.org.dspace.checker.BitstreamDispatcher=org.dspace.checker.SimpleDispatcher
@@ -924,6 +940,11 @@ metadata.hide.eperson.orcid.refresh-token = true
# Defaults to true; If set to 'false', submitter has option to skip upload
#webui.submit.upload.required = true
# Which field should be used for type-bind
# Defaults to 'dc.type'; If changing this value, you must also update the related
# dspace-angular environment configuration property submission.typeBind.field
#submit.type-bind.field = dc.type
#### Creative Commons settings ######
# The url to the web service API
@@ -1514,6 +1535,11 @@ request.item.type = all
# Should all Request Copy emails go to the helpdesk instead of the item submitter?
request.item.helpdesk.override = false
#------------------------------------------------------------------#
#------------------SUBMISSION CONFIGURATION------------------------#
#------------------------------------------------------------------#
# Field to use for type binding, default dc.type
submit.type-bind.field = dc.type
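As a rough illustration of the type-bind behaviour exercised by the submission form and workspace item tests earlier in this change (names below are invented for the sketch; this is not the DSpace implementation):

import java.util.List;

public class TypeBindSketch {
    /**
     * A field bound to one or more types is only editable/retained when the item's
     * current value of the configured bind field (submit.type-bind.field, default dc.type)
     * matches one of the bound types. An empty bind list means "always allowed".
     */
    public static boolean isFieldAllowed(List<String> typeBind, String currentTypeValue) {
        if (typeBind == null || typeBind.isEmpty()) {
            return true;
        }
        return currentTypeValue != null && typeBind.contains(currentTypeValue);
    }

    public static void main(String[] args) {
        List<String> bind = List.of("Technical Report");
        System.out.println(isFieldAllowed(bind, "Technical Report")); // true: series field kept
        System.out.println(isFieldAllowed(bind, "Article"));          // false: series field dropped
    }
}

This mirrors the tests above: dc.relation.ispartofseries is bound to "Technical Report", so patches to it are ignored until dc.type matches, and the series value is lost again once the type is changed to "Article".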
#------------------------------------------------------------------#
#-------------------MODULE CONFIGURATIONS--------------------------#

View File

@@ -1,3 +1,16 @@
## E-mail sent to a restricted Item's author when a user requests a copy.
##
## Parameters: 0 requester's name
## 1 requester's address
## 2 name of a single bitstream, or "all"
## 3 item Handle
## 4 item title
## 5 message from requester
## 6 link back to DSpace for action
## 7 corresponding author name
## 8 corresponding author email
## 9 configuration property "dspace.name"
## 10 configuration property "mail.helpdesk"
#set($subject = 'Request copy of document')
Dear ${params[7]},

View File

@@ -1,13 +0,0 @@
Subject: Request copy of document
Dear Administrator,
A user of {7}, named {0} and using the email {1}, requested a copy of the file(s) associated with the document: "{4}" ({3}).
This request came along with the following message:
"{5}"
To answer, click {6}.
PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S).

View File

@@ -1,26 +0,0 @@
## E-mail sent with the information filled out in a suggest form.
##
## Parameters: {0} recipient name
## {1} sender name
## {2} repository name
## {3} item title
## {4} item handle URI
## {5} item local URL - may be used in lieu of {4} if not using handle server
## {6} collection name
## {7} sender message
## See org.dspace.core.Email for information on the format of this file.
##
#set($subject = 'An item of interest from DSpace')
Hello ${params[0]}:
${params[1]} requested we send you this email regarding an item available in ${params[2]}.
Title: ${params[3]}
Location: ${params[5]}
In Collection: ${params[6]}
Personal Message: ${params[7]}
The DSpace digital repository system captures, stores, indexes, preserves, and distributes digital material.
For more information, visit www.dspace.org

View File

@@ -81,6 +81,8 @@ authority.minconfidence = ambiguous
plugin.named.org.dspace.content.authority.ChoiceAuthority = \
org.dspace.content.authority.EPersonAuthority = EPersonAuthority
# Configuration settings required for Researcher Profiles
# These settings ensure the "dspace.object.owner" field is indexed by Authority Control
choices.plugin.dspace.object.owner = EPersonAuthority
choices.presentation.dspace.object.owner = suggest
authority.controlled.dspace.object.owner = true

View File

@@ -2,14 +2,16 @@
#------------------- PROFILE CONFIGURATIONS --------------------#
#---------------------------------------------------------------#
#the entity type of the researcher profile
researcher-profile.entity-type = Person
# The Entity Type to use for the Researcher Profile. Defaults to "Person" as this is the recommended Entity Type to use
#researcher-profile.entity-type = Person
# the uuid of the collection where store the researcher profiles created from scratch
researcher-profile.collection.uuid =
# The UUID of the default Collection where newly created Entities will be stored. If unspecified, the first Collection which supports "entity-type" will be used.
#researcher-profile.collection.uuid =
# if true when the profile is deleted even the related item is deleted, if false only the link between profile and item is removed
# Whether or not to delete the Entity (Item) when a Profile is deleted. Default value is "false" which means that when a user deletes their profile,
# the Entity remains (retaining its data and relationships). When set to "true", the Entity (and its relationships) will be deleted if a user deletes their Profile.
researcher-profile.hard-delete.enabled = false
#true if the new profiles should be private (without anonymous read policy), false otherwise
researcher-profile.set-new-profile-private = true
# Whether a newly created profile should be visible by default. Default value is "false" which means a newly created profile is not readable to
# anonymous users. Setting to "true" means a newly created profile is immediately readable to anonymous users.
researcher-profile.set-new-profile-visible = false
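For context, a minimal sketch of reading these defaults (assuming the org.dspace.services.ConfigurationService accessors used elsewhere in this change; property names are the ones documented above):

import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;

public class ProfileConfigSketch {
    public static void main(String[] args) {
        ConfigurationService config = DSpaceServicesFactory.getInstance().getConfigurationService();
        // Entity type falls back to "Person" when the property is left commented out.
        String entityType = config.getProperty("researcher-profile.entity-type", "Person");
        // Hard delete and default visibility both fall back to false.
        boolean hardDelete = config.getBooleanProperty("researcher-profile.hard-delete.enabled", false);
        boolean visibleByDefault = config.getBooleanProperty("researcher-profile.set-new-profile-visible", false);
        System.out.printf("entityType=%s hardDelete=%b visible=%b%n", entityType, hardDelete, visibleByDefault);
    }
}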

View File

@@ -38,6 +38,7 @@ rest.properties.exposed = orcid.application-client-id
rest.properties.exposed = orcid.authorize-url
rest.properties.exposed = orcid.scope
rest.properties.exposed = orcid.disconnection.allowed-users
rest.properties.exposed = submit.type-bind.field
#---------------------------------------------------------------#
# These configs are used by the deprecated REST (v4-6) module #

View File

@@ -27,6 +27,7 @@
<bean id="coreServiceFactory" class="org.dspace.core.factory.CoreServiceFactoryImpl"/>
<bean id="accessStatusServiceFactory" class="org.dspace.access.status.factory.AccessStatusServiceFactoryImpl"/>
<bean id="disseminateServiceFactory" class="org.dspace.disseminate.factory.DisseminateServiceFactoryImpl"/>
<bean id="embargoServiceFactory" class="org.dspace.embargo.factory.EmbargoServiceFactoryImpl"/>
<bean id="ePersonServiceFactory" class="org.dspace.eperson.factory.EPersonServiceFactoryImpl"/>

View File

@@ -90,8 +90,9 @@
<bean class="org.dspace.disseminate.CitationDocumentServiceImpl"/>
<!-- Ensure EmbargoService is initialized properly via init() method -->
<!-- Ensure EmbargoService and AccessStatusService are initialized properly via init() method -->
<bean class="org.dspace.embargo.EmbargoServiceImpl" init-method="init"/>
<bean class="org.dspace.access.status.AccessStatusServiceImpl" init-method="init"/>
<bean class="org.dspace.eperson.AccountServiceImpl"/>
<bean class="org.dspace.eperson.EPersonServiceImpl"/>

View File

@@ -1438,7 +1438,7 @@
<list>
<!--Only find items into claimable collection defined in cfg-->
<value>search.resourcetype:Item</value>
<value>search.entitytype:${researcher-profile.entity-type}</value>
<value>search.entitytype:${researcher-profile.entity-type:Person}</value>
</list>
</property>
<!--Default result per page -->

pom.xml
View File

@@ -29,8 +29,9 @@
<ehcache.version>3.4.0</ehcache.version>
<errorprone.version>2.10.0</errorprone.version>
<!-- NOTE: when updating jackson.version, also sync jackson-databind dependency below -->
<jackson.version>2.12.3</jackson.version>
<!-- NOTE: when updating jackson.version, also sync jackson-databind.version below -->
<jackson.version>2.12.6</jackson.version>
<jackson-databind.version>2.12.6.1</jackson-databind.version>
<javax-annotation.version>1.3.2</javax-annotation.version>
<jaxb-api.version>2.3.1</jaxb-api.version>
<jaxb-runtime.version>2.3.1</jaxb-runtime.version>
@@ -1542,16 +1543,13 @@
<artifactId>fontbox</artifactId>
<version>${pdfbox-version}</version>
</dependency>
<dependency>
<groupId>xalan</groupId>
<artifactId>xalan</artifactId>
<version>2.7.0</version>
</dependency>
<!-- Tika and Jena disagree on version of Xerces to use. Select latest -->
<dependency>
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
<version>2.12.2</version>
</dependency>
<!-- SWORDv1 and SWORDv2 modules both pull in various versions of xml-apis. Select latest version -->
<dependency>
<groupId>xml-apis</groupId>
<artifactId>xml-apis</artifactId>
@@ -1648,12 +1646,6 @@
<version>2.1.210</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.6</version>
<scope>compile</scope>
</dependency>
<!-- Google Analytics -->
<dependency>
<groupId>com.google.apis</groupId>
@@ -1724,7 +1716,7 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
<version>${jackson-databind.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>