Merge pull request #13 from atmire/w2p-58898_place-column-calculation-error-prepare-merge

W2p 58898 place column calculation error prepare merge
Authored by benbosman on 2019-03-15 13:56:39 +01:00, committed by GitHub
94 changed files with 4187 additions and 908 deletions


@@ -3,3 +3,4 @@
.settings/
*/target/
dspace/modules/*/target/
Dockerfile.*

Dockerfile.dependencies (new file, 24 lines added)

@@ -0,0 +1,24 @@
# This image will be published as dspace/dspace-dependencies
# The purpose of this image is to make the build for dspace/dspace run faster
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
RUN useradd dspace \
&& mkdir /home/dspace \
&& chown -Rv dspace: /home/dspace
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Trigger the installation of all maven dependencies
# Clean up the built artifacts in the same step to keep the docker image small
RUN mvn package && mvn clean
# Clear the contents of the /app directory so no artifacts are left when dspace:dspace is built
USER root
RUN rm -rf /app/*


@@ -9,20 +9,31 @@
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD . /app/
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
RUN mvn package
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /app /dspace-src
WORKDIR /dspace-src/dspace/target/${TARGET_DIR}
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
@@ -32,23 +43,15 @@ ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant update_configs update_code update_webapps update_solr_indexes
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jre8
COPY --from=ant_build /dspace /dspace
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
# Ant will be embedded in the final container to allow additional deployments
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
ENV DSPACE_INSTALL=/dspace
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \


@@ -5,28 +5,35 @@
# - tomcat:8-jre8
# - ANT 1.10.5
# - maven:3-jdk-8
# - note: expose /solr to any host; provide /rest over http
# - note:
# - default tag for branch: dspace/dspace: dspace/dspace:dspace-7_x-jdk8-test
# Step 1 - Run Maven Build
FROM maven:3-jdk-8 as build
FROM dspace/dspace-dependencies:dspace-7_x as build
ARG TARGET_DIR=dspace-installer
WORKDIR /app
# The dspace-install directory will be written to /install
RUN mkdir /install \
&& chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace
# Copy the DSpace source code into the workdir (excluding .dockerignore contents)
ADD . /app/
ADD --chown=dspace . /app/
COPY dspace/src/main/docker/local.cfg /app/local.cfg
# Provide web.xml overrides to make webapps easier to test
COPY dspace/src/main/docker/test/solr_web.xml /app/dspace-solr/src/main/webapp/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml /app/dspace-rest/src/main/webapp/WEB-INF/web.xml
RUN mvn package
# Build DSpace. Copy the dspace-install directory to /install. Clean up the build to keep the docker image small
RUN mvn package && \
mv /app/dspace/target/${TARGET_DIR}/* /install && \
mvn clean
# Step 2 - Run Ant Deploy
FROM tomcat:8-jre8 as ant_build
ARG TARGET_DIR=dspace-installer
COPY --from=build /app /dspace-src
WORKDIR /dspace-src/dspace/target/${TARGET_DIR}
COPY --from=build /install /dspace-src
WORKDIR /dspace-src
# Create the initial install deployment using ANT
ENV ANT_VERSION 1.10.5
@@ -36,23 +43,15 @@ ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
RUN ant update_configs update_code update_webapps update_solr_indexes
RUN ant init_installation update_configs update_code update_webapps update_solr_indexes
# Step 3 - Run tomcat
# Create a new tomcat image that does not retain the build directory contents
FROM tomcat:8-jre8
COPY --from=ant_build /dspace /dspace
ENV DSPACE_INSTALL=/dspace
COPY --from=ant_build /dspace $DSPACE_INSTALL
EXPOSE 8080 8009
# Ant will be embedded in the final container to allow additional deployments
ENV ANT_VERSION 1.10.5
ENV ANT_HOME /tmp/ant-$ANT_VERSION
ENV PATH $ANT_HOME/bin:$PATH
RUN mkdir $ANT_HOME && \
wget -qO- "https://www.apache.org/dist/ant/binaries/apache-ant-$ANT_VERSION-bin.tar.gz" | tar -zx --strip-components=1 -C $ANT_HOME
ENV DSPACE_INSTALL=/dspace
ENV JAVA_OPTS=-Xmx2000m
RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr && \
@@ -62,3 +61,9 @@ RUN ln -s $DSPACE_INSTALL/webapps/solr /usr/local/tomcat/webapps/solr
ln -s $DSPACE_INSTALL/webapps/rdf /usr/local/tomcat/webapps/rdf && \
ln -s $DSPACE_INSTALL/webapps/sword /usr/local/tomcat/webapps/sword && \
ln -s $DSPACE_INSTALL/webapps/swordv2 /usr/local/tomcat/webapps/swordv2
COPY dspace/src/main/docker/test/solr_web.xml $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml
COPY dspace/src/main/docker/test/rest_web.xml $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml
RUN sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/solr/WEB-INF/web.xml && \
sed -i -e "s|\${dspace.dir}|$DSPACE_INSTALL|" $DSPACE_INSTALL/webapps/rest/WEB-INF/web.xml


@@ -79,6 +79,57 @@ install, upgrade, customize or host DSpace, then we recommend getting in touch w
The DSpace Issue Tracker can be found at: https://jira.duraspace.org/projects/DS/summary
## Testing
### Running Tests
By default, in DSpace, Unit Tests and Integration Tests are disabled. However, they are
run automatically by [Travis CI](https://travis-ci.org/DSpace/DSpace/) for all Pull Requests and code commits.
* How to run both Unit Tests (via `maven-surefire-plugin`) and Integration Tests (via `maven-failsafe-plugin`):
```
# NOTE: while "mvn test" runs Unit Tests,
# Integration Tests only run for "verify" or "install" phases
mvn clean install -Dmaven.test.skip=false -DskipITs=false
```
* How to run just Unit Tests:
```
mvn clean test -Dmaven.test.skip=false
```
* How to run a *single* Unit Test
```
# Run all tests in a specific test class
# NOTE: testClassName is just the class name, do not include package
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]
# Run one test method in a specific test class
mvn clean test -Dmaven.test.skip=false -Dtest=[testClassName]#[testMethodName]
```
* How to run Integration Tests (requires running Unit tests too)
```
mvn clean verify -Dmaven.test.skip=false -DskipITs=false
```
* How to run a *single* Integration Test (requires running Unit tests too)
```
# Run all integration tests in a specific test class
# NOTE: Integration Tests only run for "verify" or "install" phases
# NOTE: testClassName is just the class name, do not include package
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]
# Run one test method in a specific test class
mvn clean verify -Dmaven.test.skip=false -DskipITs=false -Dit.test=[testClassName]#[testMethodName]
```
* How to run only tests of a specific DSpace module
```
# Before you can run only one module's tests, other modules may need installing into your ~/.m2
cd [dspace-src]
mvn clean install
# Then, move into a module subdirectory, and run the test command
cd [dspace-src]/dspace-spring-rest
# Choose your test command from the lists above
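# For example, to run just this module's Unit Tests:
mvn clean test -Dmaven.test.skip=false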
```
## License
DSpace source code is freely available under a standard [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).


@@ -14,6 +14,7 @@ import org.apache.logging.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.dao.MetadataSchemaDAO;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
@@ -33,6 +34,9 @@ public class MetadataSchemaServiceImpl implements MetadataSchemaService {
*/
private static Logger log = org.apache.logging.log4j.LogManager.getLogger(MetadataSchemaServiceImpl.class);
@Autowired
protected MetadataFieldService metadataFieldService;
@Autowired(required = true)
protected AuthorizeService authorizeService;
@@ -115,10 +119,14 @@ public class MetadataSchemaServiceImpl implements MetadataSchemaService {
"Only administrators may modify the metadata registry");
}
log.info(LogManager.getHeader(context, "delete_metadata_schema",
"metadata_schema_id=" + metadataSchema.getID()));
for (MetadataField metadataField : metadataFieldService.findAllInSchema(context, metadataSchema)) {
metadataFieldService.delete(context, metadataField);
}
metadataSchemaDAO.delete(context, metadataSchema);
log.info(LogManager.getHeader(context, "delete_metadata_schema",
"metadata_schema_id=" + metadataSchema.getID()));
}
@Override


@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.test;
import java.io.IOException;
import org.dspace.content.DSpaceObject;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Curation task which simply reports its invocation without changing anything.
* Meant for testing.
*
* @author mhwood
*/
public class WorkflowReportTest
extends AbstractCurationTask {
private static final Logger LOG = LoggerFactory.getLogger(WorkflowReportTest.class);
@Override
public int perform(DSpaceObject dso)
throws IOException {
LOG.info("Class {} as task {} received 'perform' for object {}",
WorkflowReportTest.class.getSimpleName(), taskId, dso);
curator.report(String.format(
"Class %s as task %s received 'perform' for object %s%n",
WorkflowReportTest.class.getSimpleName(), taskId, dso));
return Curator.CURATE_SUCCESS;
}
}


@@ -9,6 +9,10 @@ package org.dspace.curate;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
@@ -18,6 +22,7 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.output.NullOutputStream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -57,7 +62,9 @@ public class CurationCli {
options.addOption("e", "eperson", true,
"email address of curating eperson");
options.addOption("r", "reporter", true,
"reporter to manage results - use '-' to report to console. If absent, no reporting");
"relative or absolute path to the desired report file. "
+ "Use '-' to report to console. "
+ "If absent, no reporting");
options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " +
"applies");
@@ -165,9 +172,17 @@ public class CurationCli {
}
Curator curator = new Curator();
if (reporterName != null) {
curator.setReporter(reporterName);
OutputStream reporter;
if (null == reporterName) {
reporter = new NullOutputStream();
} else if ("-".equals(reporterName)) {
reporter = System.out;
} else {
reporter = new PrintStream(reporterName);
}
Writer reportWriter = new OutputStreamWriter(reporter);
curator.setReporter(reportWriter);
if (scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase());
curator.setTransactionScope(txScope);


@@ -15,6 +15,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -69,16 +70,12 @@ public class Curator {
INTERACTIVE, BATCH, ANY
}
;
// transaction scopes
public static enum TxScope {
OBJECT, CURATION, OPEN
}
;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(Curator.class);
private static final Logger log = LogManager.getLogger();
protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<>();
@@ -86,7 +83,7 @@ public class Curator {
protected Map<String, TaskRunner> trMap = new HashMap<>();
protected List<String> perfList = new ArrayList<>();
protected TaskQueue taskQ = null;
protected String reporter = null;
protected Appendable reporter = null;
protected Invoked iMode = null;
protected TaskResolver resolver = new TaskResolver();
protected TxScope txScope = TxScope.OPEN;
@@ -193,7 +190,7 @@ public class Curator {
* causes reporting to standard out.
* @return return self (Curator instance) with reporter set
*/
public Curator setReporter(String reporter) {
public Curator setReporter(Appendable reporter) {
this.reporter = reporter;
return this;
}
@@ -346,9 +343,10 @@ public class Curator {
* @param message the message to output to the reporting stream.
*/
public void report(String message) {
// Stub for now
if ("-".equals(reporter)) {
System.out.println(message);
try {
reporter.append(message);
} catch (IOException ex) {
log.error("Task reporting failure", ex);
}
}
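With `setReporter` now accepting any `Appendable`, curation output can be captured in memory just as easily as sent to standard out. A minimal sketch, assuming an open `Context`; the task name and handle are illustrative, not part of this PR:
```
StringWriter out = new StringWriter();       // java.io.StringWriter is an Appendable

Curator curator = new Curator();
curator.setReporter(out);                    // could equally be System.out or a Reporter
curator.addTask("vscan");                    // hypothetical configured task name
curator.curate(context, "123456789/1");      // hypothetical handle

System.out.print(out.toString());            // the accumulated report text
```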


@@ -0,0 +1,88 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
* Save a curation report to a unique file in the reports directory.
* Reports are named by the date and time of day, for example:
* "curation-20180916T113903045.report".
*
* @author mhwood
*/
public class FileReporter
implements Reporter {
private final Writer writer;
/**
* Open a writer to a file in a directory named by the configuration
* property {@code report.dir}, or in {@code [DSpace]/reports} if not
* configured.
*
* @throws IOException if there is a problem with the file path.
*/
public FileReporter()
throws IOException {
// Calculate a unique(?) file name.
Date now = GregorianCalendar.getInstance().getTime();
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd'T'hhmmssSSS");
String filename = String.format("curation-%s.report", sdf.format(now));
// Build a path to the directory which is to receive the file.
ConfigurationService cfg = new DSpace().getConfigurationService();
String reportDir = cfg.getProperty("report.dir");
Path reportPath;
if (null == reportDir) {
reportPath = Paths.get(cfg.getProperty("dspace.dir"),
"reports",
filename);
} else {
reportPath = Paths.get(reportDir, filename);
}
// Open the file.
writer = new FileWriter(reportPath.toFile());
}
@Override
public Appendable append(CharSequence cs)
throws IOException {
writer.append(cs);
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
writer.append(cs, i, i1);
return this;
}
@Override
public Appendable append(char c) throws IOException {
writer.append(c);
return this;
}
@Override
public void close() throws Exception {
writer.close();
}
}
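A usage sketch: `FileReporter` resolves its output file at construction time from `report.dir` (falling back to `[dspace.dir]/reports`), so producing a report is just append-and-close:
```
// Writes one line to a timestamped file such as curation-20180916T113903045.report
try (Reporter reporter = new FileReporter()) {
    reporter.append("All items passed curation.\n");
} catch (Exception e) {    // close() is declared to throw Exception
    e.printStackTrace();
}
```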


@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Write curation report records through the logging framework.
* Whole lines (strings ending in '\n') are written to the log category "curation".
* Any partial line is flushed when the reporter is {@code close()}d.
*
* @author mhwood
*/
public class LogReporter
implements Reporter {
private static final Logger LOG = LoggerFactory.getLogger("curation");
private final StringBuilder buffer = new StringBuilder();
@Override
public Appendable append(CharSequence cs)
throws IOException {
for (int pos = 0; pos < cs.length(); pos++) {
char c = cs.charAt(pos);
if (c == '\n') {
LOG.info(buffer.toString());
buffer.delete(0, buffer.length()); // Clear the buffer
} else {
buffer.append(c);
}
}
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
return append(cs.subSequence(i, i1));
}
@Override
public Appendable append(char c)
throws IOException {
return append(String.valueOf(c));
}
@Override
public void close()
throws Exception {
if (buffer.length() > 0) {
LOG.info(buffer.toString());
}
}
}
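A behavior sketch: appends are buffered until a newline arrives, so the two fragments below reach the "curation" log category as a single record (the handle is illustrative):
```
try (Reporter reporter = new LogReporter()) {
    reporter.append("item 123456789/42: ");       // buffered, nothing logged yet
    reporter.append("3 bitstreams checked\n");    // '\n' flushes one log record
} catch (Exception e) {
    e.printStackTrace();
}
```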


@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
/**
* A marker interface needed to make curation reporter classes into plugins.
*
* @author mhwood
*/
public interface Reporter
extends Appendable, AutoCloseable {
}
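Any class implementing this pair of interfaces can act as a reporter plugin. A minimal sketch of a custom implementation (the class name is hypothetical):
```
public class StdoutReporter implements Reporter {
    @Override
    public Appendable append(CharSequence cs) {
        System.out.print(cs);
        return this;
    }

    @Override
    public Appendable append(CharSequence cs, int start, int end) {
        return append(cs.subSequence(start, end));
    }

    @Override
    public Appendable append(char c) {
        System.out.print(c);
        return this;
    }

    @Override
    public void close() {
        // nothing to release for standard out
    }
}
```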


@@ -30,6 +30,8 @@ import org.dspace.content.Item;
import org.dspace.content.service.CollectionService;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.core.service.PluginService;
import org.dspace.curate.service.WorkflowCuratorService;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
@@ -56,9 +58,10 @@ import org.springframework.beans.factory.annotation.Autowired;
public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
/**
* log4j logger
* Logging category
*/
private Logger log = org.apache.logging.log4j.LogManager.getLogger(WorkflowCuratorServiceImpl.class);
private static final Logger log
= org.apache.logging.log4j.LogManager.getLogger();
protected Map<String, TaskSet> tsMap = new HashMap<String, TaskSet>();
@@ -118,6 +121,7 @@ public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
Curator curator = new Curator();
// are we going to perform, or just put on queue?
if (step.queue != null) {
// The queue runner will call setReporter
for (Task task : step.tasks) {
curator.addTask(task.name);
}
@@ -125,7 +129,18 @@ public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
basicWorkflowItemService.update(c, wfi);
return false;
} else {
return curate(curator, c, wfi);
PluginService plugins = CoreServiceFactory.getInstance()
.getPluginService();
try (Reporter reporter
= (Reporter) plugins
.getSinglePlugin(Reporter.class);) {
curator.setReporter(reporter);
boolean status = curate(curator, c, wfi);
reporter.close();
return status;
} catch (Exception e) {
log.error("Failed to close report", e);
}
}
}
return true;


@@ -10,6 +10,7 @@ package org.dspace.storage.rdbms;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
@@ -23,6 +24,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.sql.DataSource;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.core.Context;
@@ -88,7 +90,8 @@ public class DatabaseUtils {
// Usage checks
if (argv.length < 1) {
System.out.println("\nDatabase action argument is missing.");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate' or 'clean'");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate', " +
"'update-sequences' or 'clean'");
System.out.println("\nOr, type 'database help' for more information.\n");
System.exit(1);
}
@@ -328,23 +331,48 @@ public class DatabaseUtils {
e.printStackTrace();
System.exit(1);
}
} else if (argv[0].equalsIgnoreCase("update-sequences")) {
try (Connection connection = dataSource.getConnection()) {
String dbType = getDbType(connection);
String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType +
"/update-sequences.sql";
InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile);
if (sqlstream != null) {
String s = IOUtils.toString(sqlstream, "UTF-8");
if (!s.isEmpty()) {
System.out.println("Running " + sqlfile);
connection.createStatement().execute(s);
System.out.println("update-sequences complete");
} else {
System.err.println(sqlfile + " contains no SQL to execute");
}
} else {
System.err.println(sqlfile + " not found");
}
}
} else {
System.out.println("\nUsage: database [action]");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair' or 'clean'");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', " +
"'update-sequences' or 'clean'");
System.out.println(
" - test = Performs a test connection to database to validate connection settings");
" - test = Performs a test connection to database to " +
"validate connection settings");
System.out.println(
" - info / status = Describe basic info/status about database, including validating the " +
"compatibility of this database");
System.out.println(" - migrate = Migrate the database to the latest version");
System.out.println(
" - repair = Attempt to repair any previously failed database migrations or checksum " +
"mismatches (via Flyway repair)");
" - migrate = Migrate the database to the latest version");
System.out.println(
" - repair = Attempt to repair any previously failed database " +
"migrations or checksum mismatches (via Flyway repair)");
System.out.println(
" - validate = Validate current database's migration status (via Flyway validate), " +
"validating all migration checksums.");
System.out.println(
" - clean = DESTROY all data and tables in database (WARNING there is no going back!). " +
" - update-sequences = Update database sequences after running AIP ingest.");
System.out.println(
" - clean = DESTROY all data and tables in database " +
"(WARNING there is no going back!). " +
"Requires 'db.cleanDisabled=false' setting in config.");
System.out.println("");
System.exit(0);


@@ -0,0 +1,79 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- SQL code to update the ID (primary key) generating sequences, if some
-- import operation has set explicit IDs.
--
-- Sequences are used to generate IDs for new rows in the database. If a
-- bulk import operation, such as an SQL dump, specifies primary keys for
-- imported data explicitly, the sequences are out of sync and need updating.
-- This SQL code does just that.
--
-- This should rarely be needed; any bulk import should be performed using the
-- org.dspace.content API which is safe to use concurrently and in multiple
-- JVMs. The SQL code below will typically only be required after a direct
-- SQL data dump from a backup or somesuch.
-- The 'updateseq' procedure was derived from incseq.sql found at:
-- http://www.akadia.com/services/scripts/incseq.sql
DECLARE
PROCEDURE updateseq ( seq IN VARCHAR,
tbl IN VARCHAR,
attr IN VARCHAR,
cond IN VARCHAR DEFAULT '' ) IS
curr NUMBER := 0;
BEGIN
EXECUTE IMMEDIATE 'SELECT max(' || attr
|| ') FROM ' || tbl
|| ' ' || cond
INTO curr;
curr := curr + 1;
EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq;
EXECUTE IMMEDIATE 'CREATE SEQUENCE '
|| seq
|| ' START WITH '
|| NVL(curr, 1);
END updateseq;
BEGIN
updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry',
'bitstream_format_id');
updateseq('fileextension_seq', 'fileextension', 'file_extension_id');
updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id');
updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id');
updateseq('workflowitem_seq', 'workflowitem', 'workflow_id');
updateseq('tasklistitem_seq', 'tasklistitem', 'tasklist_id');
updateseq('registrationdata_seq', 'registrationdata',
'registrationdata_id');
updateseq('subscription_seq', 'subscription', 'subscription_id');
updateseq('metadatafieldregistry_seq', 'metadatafieldregistry',
'metadata_field_id');
updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id');
updateseq('metadataschemaregistry_seq', 'metadataschemaregistry',
'metadata_schema_id');
updateseq('harvested_collection_seq', 'harvested_collection', 'id');
updateseq('harvested_item_seq', 'harvested_item', 'id');
updateseq('webapp_seq', 'webapp', 'webapp_id');
updateseq('requestitem_seq', 'requestitem', 'requestitem_id');
updateseq('handle_id_seq', 'handle', 'handle_id');
-- Handle Sequence is a special case. Since Handles minted by DSpace
-- use the 'handle_seq', we need to ensure the next assigned handle
-- will *always* be unique. So, 'handle_seq' always needs to be set
-- to the value of the *largest* handle suffix. That way when the
-- next handle is assigned, it will use the next largest number. This
-- query does the following:
-- For all 'handle' values which have a number in their suffix
-- (after '/'), find the maximum suffix value, convert it to a
-- number, and set the 'handle_seq' to start at the next value (see
-- updateseq above for more).
updateseq('handle_seq', 'handle',
q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}',
q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}');
END;


@@ -17,4 +17,16 @@ not realize you manually ran one or more scripts.
Please see the Flyway Documentation for more information: http://flywaydb.org/
## Using the update-sequences.sql script
The `update-sequences.sql` script in this directory may still be used to update
your internal database counts if you feel they have gotten out of "sync". This
may sometimes occur after large restores of content (e.g. when using the DSpace
[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore)
feature).
This `update-sequences.sql` script can be executed by running
"dspace database update-sequences". It will not harm your
database (or its contents) in any way. It just ensures all database counts (i.e.
sequences) are properly set to the next available value.


@@ -1,35 +1,10 @@
--
-- update-sequences.sql
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- Copyright (c) 2002-2016, The DSpace Foundation. All rights reserved.
-- http://www.dspace.org/license/
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- - Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- Neither the name of the DSpace Foundation nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-- DAMAGE.
-- SQL code to update the ID (primary key) generating sequences, if some
-- import operation has set explicit IDs.


@@ -58,7 +58,7 @@ public class CuratorTest
// Get and configure a Curator.
Curator instance = new Curator();
instance.setReporter("-"); // Send any report to standard out. FIXME when DS-3989 is merged
instance.setReporter(System.out); // Send any report to standard out.
instance.addTask(TASK_NAME);
// Configure the run.


@@ -0,0 +1,202 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.services.ConfigurationService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Drive the Curator and check results.
*
* @author mhwood
*/
public class ITCurator
extends AbstractUnitTest {
Logger LOG = LoggerFactory.getLogger(ITCurator.class);
public ITCurator() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* The report should contain contributions from all tasks and all curated objects.
*
* @throws SQLException passed through.
* @throws IOException passed through.
* @throws AuthorizeException passed through.
*/
@Test
public void testCurate_Reporting()
throws SQLException, IOException, AuthorizeException {
// Configure for testing.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
Task1.class.getName() + " = task1");
cfg.addPropertyValue("plugin.named.org.dspace.curate.CurationTask",
Task2.class.getName() + " = task2");
// Create some structure.
context.turnOffAuthorisationSystem();
Site site = ContentServiceFactory.getInstance()
.getSiteService()
.findSite(context);
Community community = ContentServiceFactory.getInstance()
.getCommunityService()
.create(null, context);
// Run some tasks.
ListReporter reporter = new ListReporter();
Curator curator = new Curator();
curator.setReporter(reporter);
curator.addTask("task1");
curator.addTask("task2");
curator.curate(context, site);
// Validate the results.
List<String> report = reporter.getReport();
for (String aReport : report) {
LOG.info("Report: {}", aReport);
}
Pattern pattern;
pattern = Pattern.compile(String.format("task1.*%s", site.getHandle()));
Assert.assertTrue("A report should mention 'task1' and site's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task1.*%s", community.getHandle()));
Assert.assertTrue("A report should mention 'task1' and the community's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task2.*%s", site.getHandle()));
Assert.assertTrue("A report should mention 'task2' and the Site's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task2.*%s", community.getHandle()));
Assert.assertTrue("A report should mention 'task2' and the community's handle",
reportMatcher(report, pattern));
}
/**
* Match a collection of strings against a regular expression.
*
* @param reports strings to be searched.
* @param pattern expression to be matched.
* @return true if at least one string matches the expression.
*/
private boolean reportMatcher(List<String> reports, Pattern pattern) {
for (String aReport : reports) {
if (pattern.matcher(aReport).find()) {
return true;
}
}
return false;
}
/**
* Dummy curation task for testing. Reports how it was invoked.
*
* @author mhwood
*/
public static class Task1 extends AbstractCurationTask {
public Task1() {
}
@Override
public int perform(DSpaceObject dso)
throws IOException {
curator.report(String.format(
"Task1 received 'perform' on taskId '%s' for object '%s'%n",
taskId, dso.getHandle()));
return Curator.CURATE_SUCCESS;
}
}
/**
* Dummy curation task for testing. Reports how it was invoked.
*
* @author mhwood
*/
public static class Task2 extends AbstractCurationTask {
public Task2() {
}
@Override
public int perform(DSpaceObject dso) throws IOException {
curator.report(String.format(
"Task2 received 'perform' on taskId '%s' for object '%s'%n",
taskId, dso.getHandle()));
return Curator.CURATE_SUCCESS;
}
}
/**
* Absorb report strings into a sequential collection.
*/
class ListReporter
implements Appendable {
private final List<String> report = new ArrayList<>();
/**
* Get the content of the report accumulator.
* @return accumulated reports.
*/
List<String> getReport() {
return report;
}
@Override
public Appendable append(CharSequence cs)
throws IOException {
report.add(cs.toString());
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Appendable append(char c)
throws IOException {
throw new UnsupportedOperationException("Not supported yet.");
}
}
}


@@ -54,15 +54,15 @@ import org.dspace.content.MetadataField;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.xoai.exceptions.CompilingException;
import org.dspace.xoai.services.api.CollectionsService;
import org.dspace.xoai.services.api.cache.XOAICacheService;
import org.dspace.xoai.services.api.cache.XOAIItemCacheService;
import org.dspace.xoai.services.api.cache.XOAILastCompilationCacheService;
import org.dspace.xoai.services.api.config.ConfigurationService;
import org.dspace.xoai.services.api.solr.SolrServerResolver;
import org.dspace.xoai.solr.DSpaceSolrSearch;
import org.dspace.xoai.solr.exceptions.DSpaceSolrException;
@@ -94,6 +94,8 @@ public class XOAI {
private final AuthorizeService authorizeService;
private final ItemService itemService;
private final static ConfigurationService configurationService = DSpaceServicesFactory
.getInstance().getConfigurationService();
private List<String> getFileFormats(Item item) {
List<String> formats = new ArrayList<>();
@@ -283,14 +285,16 @@ public class XOAI {
throws DSpaceSolrIndexerException {
try {
int i = 0;
int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000);
SolrServer server = solrServerResolver.getServer();
ArrayList<SolrInputDocument> list = new ArrayList<>();
while (iterator.hasNext()) {
try {
Item item = iterator.next();
if (item.getHandle() == null) {
log.warn("Skipped item without handle: " + item.getID());
} else {
server.add(this.index(item));
list.add(this.index(item));
}
//Uncache the item to keep memory consumption low
context.uncacheEntity(item);
@@ -300,12 +304,22 @@ public class XOAI {
log.error(ex.getMessage(), ex);
}
i++;
if (i % 100 == 0) {
if (i % 1000 == 0 && batchSize != 1000) {
System.out.println(i + " items imported so far...");
}
if (i % batchSize == 0) {
System.out.println(i + " items imported so far...");
server.add(list);
server.commit();
list.clear();
}
}
System.out.println("Total: " + i + " items");
server.commit();
if (i > 0) {
server.add(list);
server.commit(true, true);
list.clear();
}
return i;
} catch (SolrServerException | IOException ex) {
throw new DSpaceSolrIndexerException(ex.getMessage(), ex);
@@ -334,6 +348,7 @@ public class XOAI {
dates.add(policy.getEndDate());
}
}
context.uncacheEntity(policy);
}
dates.add(item.getLastModified());
Collections.sort(dates);
@@ -458,6 +473,7 @@ public class XOAI {
return true;
}
}
context.uncacheEntity(policy);
}
return false;
}
@@ -477,7 +493,7 @@ public class XOAI {
private static boolean getKnownExplanation(Throwable t) {
if (t instanceof ConnectException) {
System.err.println("Solr server ("
+ ConfigurationManager.getProperty("oai", "solr.url")
+ configurationService.getProperty("oai.solr.url", "")
+ ") is down, turn it on.");
return true;
}
@@ -525,7 +541,6 @@ public class XOAI {
BasicConfiguration.class
});
ConfigurationService configurationService = applicationContext.getBean(ConfigurationService.class);
XOAICacheService cacheService = applicationContext.getBean(XOAICacheService.class);
XOAIItemCacheService itemCacheService = applicationContext.getBean(XOAIItemCacheService.class);
@@ -547,7 +562,7 @@ public class XOAI {
boolean solr = true; // Assuming solr by default
solr = !("database").equals(configurationService.getProperty("oai", "storage"));
solr = !("database").equals(configurationService.getProperty("oai.storage", "solr"));
boolean run = false;
@@ -652,7 +667,7 @@ public class XOAI {
private static void usage() {
boolean solr = true; // Assuming solr by default
solr = !("database").equals(ConfigurationManager.getProperty("oai", "storage"));
solr = !("database").equals(configurationService.getProperty("oai.storage","solr"));
if (solr) {
System.out.println("OAI Manager Script");


@@ -346,6 +346,26 @@ public final class DSpaceConfigurationService implements ConfigurationService {
}
}
@Override
public synchronized boolean addPropertyValue(String name, Object value) {
if (name == null) {
throw new IllegalArgumentException("name cannot be null for setting configuration");
}
if (value == null) {
throw new IllegalArgumentException("configuration value may not be null");
}
// If the value is a type of String, trim any leading/trailing spaces before saving it.
if (String.class.isInstance(value)) {
value = ((String) value).trim();
}
Configuration configuration = getConfiguration();
boolean isNew = !configuration.containsKey(name);
configuration.addProperty(name, value);
return isNew;
}
/* (non-Javadoc)
* @see org.dspace.services.ConfigurationService#setProperty(java.lang.String, java.lang.Object)
*/
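A usage sketch for the new `addPropertyValue`, mirroring the way ITCurator (above) registers curation tasks; the task mappings here are illustrative:
```
ConfigurationService cfg = DSpaceServicesFactory.getInstance().getConfigurationService();

// First call on a missing key creates the property and returns true...
boolean created = cfg.addPropertyValue("plugin.named.org.dspace.curate.CurationTask",
        "org.dspace.ctask.general.NoOpCurationTask = noop");

// ...later calls append an additional value to the same key and return false.
created = cfg.addPropertyValue("plugin.named.org.dspace.curate.CurationTask",
        "org.dspace.ctask.general.ClamScan = vscan");
```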


@@ -237,6 +237,16 @@ public interface ConfigurationService {
*/
public boolean hasProperty(String name);
/**
* Add a value to a configuration property.
*
* @param name the property name. May not be null.
* @param value the property value. May not be null.
* @return true if a new property was created.
* @throws IllegalArgumentException if the name or value is null.
*/
public boolean addPropertyValue(String name, Object value);
/**
* Set a configuration property (setting) in the system.
* Type is not important here since conversion happens automatically


@@ -226,6 +226,12 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.flipkart.zjsonpatch</groupId>
<artifactId>zjsonpatch</artifactId>
<version>0.4.6</version>
</dependency>
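zjsonpatch computes and applies RFC 6902 JSON Patch documents; presumably it backs the REST layer's PATCH handling introduced in this PR. A minimal sketch of its two core entry points (standard zjsonpatch API, Jackson types):
```
ObjectMapper mapper = new ObjectMapper();
JsonNode source = mapper.readTree("{\"name\":\"Old name\"}");
JsonNode target = mapper.readTree("{\"name\":\"New name\"}");

// Diff two documents into a JSON Patch array of operations...
JsonNode patch = JsonDiff.asJson(source, target);

// ...or apply a patch to a source document.
JsonNode patched = JsonPatch.apply(patch, source);
```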
<!-- The HAL Browser -->
<dependency>
<groupId>org.springframework.data</groupId>


@@ -43,7 +43,6 @@ import org.dspace.discovery.SearchUtils;
import org.dspace.discovery.configuration.DiscoveryConfiguration;
import org.dspace.discovery.configuration.DiscoverySearchFilter;
import org.springframework.boot.autoconfigure.web.ErrorController;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
@@ -60,7 +59,7 @@ import org.w3c.dom.Document;
*/
@Controller
@RequestMapping("/opensearch")
public class OpenSearchController implements ErrorController {
public class OpenSearchController {
private static final Logger log = Logger.getLogger(ScopeResolver.class);
private static final String errorpath = "/error";
@@ -192,16 +191,6 @@ public class OpenSearchController implements ErrorController {
}
}
@RequestMapping(value = errorpath)
public String error() {
return "Error handling";
}
@Override
public String getErrorPath() {
return errorpath;
}
/**
* Internal method for controller initialization
*/


@@ -1047,6 +1047,33 @@ public class RestResourceController implements InitializingBean {
return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT);
}
/**
* Execute a PUT request for an entity with id of type UUID;
*
* curl -X PUT http://<dspace.restUrl>/api/{apiCategory}/{model}/{uuid}
*
* Example:
* <pre>
* {@code
* curl -X PUT http://<dspace.restUrl>/api/core/collection/8b632938-77c2-487c-81f0-e804f63e68e6
* }
* </pre>
*
* @param request the http request
* @param apiCategory the API category e.g. "core"
* @param model the DSpace model e.g. "collection"
* @param uuid the ID of the target REST object
* @param jsonNode the part of the request body representing the updated rest object
* @return the relevant REST resource
*/
@RequestMapping(method = RequestMethod.PUT, value = REGEX_REQUESTMAPPING_IDENTIFIER_AS_UUID)
public DSpaceResource<RestAddressableModel> put(HttpServletRequest request,
@PathVariable String apiCategory, @PathVariable String model,
@PathVariable UUID uuid,
@RequestBody JsonNode jsonNode) {
return putOneJsonInternal(request, apiCategory, model, uuid, jsonNode);
}
/**
* Execute a PUT request for an entity with id of type Integer;
*


@@ -25,6 +25,7 @@ import org.dspace.content.service.CollectionService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.RequestService;
import org.dspace.services.model.Request;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -71,12 +72,17 @@ public class CollectionConverter
private List<ResourcePolicyRest> getDefaultBitstreamPoliciesForCollection(UUID uuid) {
HttpServletRequest request = requestService.getCurrentRequest().getHttpServletRequest();
Context context = null;
Request currentRequest = requestService.getCurrentRequest();
if (currentRequest != null) {
HttpServletRequest request = currentRequest.getHttpServletRequest();
context = ContextUtil.obtainContext(request);
} else {
context = new Context();
}
Collection collection = null;
List<ResourcePolicy> defaultCollectionPolicies = null;
try {
context = ContextUtil.obtainContext(request);
collection = collectionService.find(context, uuid);
defaultCollectionPolicies = authorizeService.getPoliciesActionFilter(context, collection,
Constants.DEFAULT_BITSTREAM_READ);


@@ -7,12 +7,8 @@
*/
package org.dspace.app.rest.converter;
import java.util.ArrayList;
import java.util.List;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.content.DSpaceObject;
import org.dspace.content.MetadataValue;
import org.springframework.beans.factory.annotation.Autowired;
/**
* This is the base converter from/to objects in the DSpace API data model and
@@ -26,6 +22,9 @@ public abstract class DSpaceObjectConverter<M extends DSpaceObject, R extends or
.DSpaceObjectRest>
extends DSpaceConverter<M, R> {
@Autowired(required = true)
private MetadataConverter metadataConverter;
@Override
public R fromModel(M obj) {
R resource = newInstance();
@@ -34,27 +33,10 @@ public abstract class DSpaceObjectConverter<M extends DSpaceObject, R extends or
resource.setUuid(obj.getID().toString());
}
resource.setName(obj.getName());
List<MetadataValue> fullList = obj.getMetadata();
List<MetadataEntryRest> metadata = convertMetadataToRest(fullList);
resource.setMetadata(metadata);
resource.setMetadata(metadataConverter.convert(obj.getMetadata()));
return resource;
}
public List<MetadataEntryRest> convertMetadataToRest(List<MetadataValue> fullList) {
List<MetadataEntryRest> metadata = new ArrayList<MetadataEntryRest>();
for (MetadataValue mv : fullList) {
MetadataEntryRest me = new MetadataEntryRest();
me.setKey(mv.getMetadataField().toString('.'));
me.setValue(mv.getValue());
me.setLanguage(mv.getLanguage());
me.setPlace(mv.getPlace());
me.setAuthority(mv.getAuthority());
me.setConfidence(mv.getConfidence());
metadata.add(me);
}
return metadata;
}
@Override
public M toModel(R obj) {
return null;


@@ -15,7 +15,6 @@ import java.util.List;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.RelationshipRest;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
@@ -47,6 +46,8 @@ public class ItemConverter extends DSpaceObjectConverter<org.dspace.content.Item
private RelationshipConverter relationshipConverter;
@Autowired
private ItemService itemService;
@Autowired
private MetadataConverter metadataConverter;
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(ItemConverter.class);
@@ -97,8 +98,7 @@ public class ItemConverter extends DSpaceObjectConverter<org.dspace.content.Item
List<MetadataValue> fullList = new LinkedList<>();
fullList = itemService.getMetadata(obj, Item.ANY, Item.ANY, Item.ANY, Item.ANY, true);
List<MetadataEntryRest> metadata = super.convertMetadataToRest(fullList);
item.setMetadata(metadata);
item.setMetadata(metadataConverter.convert(fullList));
return item;


@@ -116,7 +116,8 @@ public class JsonPatchConverter implements PatchConverter<JsonNode> {
Object value = operation.getValue();
if (value != null) {
opNode.set("value", mapper.valueToTree(value));
opNode.set("value", value instanceof JsonValueEvaluator ? ((JsonValueEvaluator) value).getValueNode()
: mapper.valueToTree(value));
}
patchNode.add(opNode);


@@ -0,0 +1,100 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.converter;
import java.sql.SQLException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.DSpaceObjectService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.converter.Converter;
import org.springframework.stereotype.Component;
/**
* Converter to translate between lists of domain {@link MetadataValue}s and {@link MetadataRest} representations.
*/
@Component
public class MetadataConverter implements Converter<List<MetadataValue>, MetadataRest> {
@Autowired
private ContentServiceFactory contentServiceFactory;
@Autowired
private MetadataValueConverter valueConverter;
/**
* Gets a rest representation of the given list of domain metadata values.
*
* @param metadataValueList the domain values.
* @return the rest representation.
*/
@Override
public MetadataRest convert(List<MetadataValue> metadataValueList) {
// Convert each value to a DTO while retaining place order in a map of key -> SortedSet
Map<String, SortedSet<MetadataValueRest>> mapOfSortedSets = new HashMap<>();
for (MetadataValue metadataValue : metadataValueList) {
String key = metadataValue.getMetadataField().toString('.');
SortedSet<MetadataValueRest> set = mapOfSortedSets.get(key);
if (set == null) {
set = new TreeSet<>(Comparator.comparingInt(MetadataValueRest::getPlace));
mapOfSortedSets.put(key, set);
}
set.add(valueConverter.convert(metadataValue));
}
MetadataRest metadataRest = new MetadataRest();
// Populate MetadataRest's map of key -> List while respecting SortedSet's order
Map<String, List<MetadataValueRest>> mapOfLists = metadataRest.getMap();
for (Map.Entry<String, SortedSet<MetadataValueRest>> entry : mapOfSortedSets.entrySet()) {
mapOfLists.put(entry.getKey(), entry.getValue().stream().collect(Collectors.toList()));
}
return metadataRest;
}
/**
* Sets a DSpace object's domain metadata values from a rest representation.
*
* @param context the context to use.
* @param dso the DSpace object.
* @param metadataRest the rest representation of the new metadata.
* @throws SQLException if a database error occurs.
* @throws AuthorizeException if an authorization error occurs.
*/
public void setMetadata(Context context, DSpaceObject dso, MetadataRest metadataRest)
throws SQLException, AuthorizeException {
DSpaceObjectService dsoService = contentServiceFactory.getDSpaceObjectService(dso);
dsoService.clearMetadata(context, dso, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (Map.Entry<String, List<MetadataValueRest>> entry: metadataRest.getMap().entrySet()) {
String[] seq = entry.getKey().split("\\.");
String schema = seq[0];
String element = seq[1];
String qualifier = seq.length == 3 ? seq[2] : null;
for (MetadataValueRest mvr: entry.getValue()) {
dsoService.addMetadata(context, dso, schema, element, qualifier, mvr.getLanguage(),
mvr.getValue(), mvr.getAuthority(), mvr.getConfidence());
}
}
dsoService.update(context, dso);
}
}
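A usage sketch covering both directions (assumes an autowired `MetadataConverter`, an open `Context`, and an `item`; the field and value are illustrative):
```
// Domain -> REST: place order becomes list order within each key.
MetadataRest rest = metadataConverter.convert(item.getMetadata());

// REST -> domain: clears the object's metadata, then re-adds values in list order.
rest.put("dc.title", new MetadataValueRest("A corrected title"));
metadataConverter.setMetadata(context, item, rest);
```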


@@ -0,0 +1,37 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.converter;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.content.MetadataValue;
import org.springframework.core.convert.converter.Converter;
import org.springframework.stereotype.Component;
/**
* Converter to translate between domain {@link MetadataValue}s and {@link MetadataValueRest} representations.
*/
@Component
public class MetadataValueConverter implements Converter<MetadataValue, MetadataValueRest> {
/**
* Gets a rest representation of the given domain metadata value.
*
* @param metadataValue the domain value.
* @return the rest representation.
*/
@Override
public MetadataValueRest convert(MetadataValue metadataValue) {
MetadataValueRest metadataValueRest = new MetadataValueRest();
metadataValueRest.setValue(metadataValue.getValue());
metadataValueRest.setLanguage(metadataValue.getLanguage());
metadataValueRest.setAuthority(metadataValue.getAuthority());
metadataValueRest.setConfidence(metadataValue.getConfidence());
metadataValueRest.setPlace(metadataValue.getPlace());
return metadataValueRest;
}
}


@@ -22,7 +22,10 @@ import org.springframework.web.bind.annotation.ResponseStatus;
public class PatchBadRequestException extends RuntimeException {
public PatchBadRequestException(String message) {
super(message);
this(message, null);
}
public PatchBadRequestException(String message, Exception e) {
super(message, e);
}
}


@@ -27,6 +27,10 @@ import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.UNPROCESSABLE_ENTITY, reason = "Unprocessable request")
public class UnprocessableEntityException extends RuntimeException {
public UnprocessableEntityException(String message, Throwable cause) {
super(message, cause);
}
public UnprocessableEntityException(String message) {
super(message);
}


@@ -10,6 +10,7 @@ package org.dspace.app.rest.model;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* The Collection REST Resource
@@ -45,6 +46,7 @@ public class CollectionRest extends DSpaceObjectRest {
}
@Override
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
public String getType() {
return NAME;
}
@@ -58,4 +60,5 @@ public class CollectionRest extends DSpaceObjectRest {
public void setDefaultAccessConditions(List<ResourcePolicyRest> defaultAccessConditions) {
this.defaultAccessConditions = defaultAccessConditions;
}
}


@@ -7,8 +7,6 @@
*/
package org.dspace.app.rest.model;
import java.util.List;
import org.dspace.app.rest.RestResourceController;
/**
@@ -22,7 +20,7 @@ public abstract class DSpaceObjectRest extends BaseObjectRest<String> {
private String name;
private String handle;
List<MetadataEntryRest> metadata;
MetadataRest metadata = new MetadataRest();
@Override
public String getId() {
@@ -53,11 +51,16 @@ public abstract class DSpaceObjectRest extends BaseObjectRest<String> {
this.handle = handle;
}
public List<MetadataEntryRest> getMetadata() {
/**
* Gets the rest representation of all metadata of the DSpace object.
*
* @return the metadata.
*/
public MetadataRest getMetadata() {
return metadata;
}
public void setMetadata(List<MetadataEntryRest> metadata) {
public void setMetadata(MetadataRest metadata) {
this.metadata = metadata;
}


@@ -11,6 +11,7 @@ import java.util.Date;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* The Item REST Resource
@@ -28,8 +29,6 @@ public class ItemRest extends DSpaceObjectRest {
private CollectionRest owningCollection;
@JsonIgnore
private CollectionRest templateItemOf;
//private EPerson submitter;
List<BitstreamRest> bitstreams;
List<RelationshipRest> relationships;
@@ -40,6 +39,7 @@ public class ItemRest extends DSpaceObjectRest {
}
@Override
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
public String getType() {
return NAME;
}


@@ -1,73 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model;
/**
* An embeddable representation of the Metadata to use in with DSpace REST
* Resource
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*/
public class MetadataEntryRest {
String key;
String value;
String language;
int place;
String authority;
int confidence;
public String getKey() {
return key;
}
public void setKey(String key) {
this.key = key;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public int getPlace() {
return place;
}
public void setPlace(int place) {
this.place = place;
}
public String getAuthority() {
return authority;
}
public void setAuthority(String authority) {
this.authority = authority;
}
public int getConfidence() {
return confidence;
}
public void setConfidence(int confidence) {
this.confidence = confidence;
}
}


@@ -8,6 +8,7 @@
package org.dspace.app.rest.model;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.dspace.app.rest.RestResourceController;
/**
@@ -61,6 +62,7 @@ public class MetadataFieldRest extends BaseObjectRest<Integer> {
}
@Override
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
public String getType() {
return NAME;
}

View File

@@ -0,0 +1,69 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model;
import java.util.Arrays;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
/**
* Rest representation of a map of metadata keys to ordered lists of values.
*/
public class MetadataRest {
@JsonAnySetter
private SortedMap<String, List<MetadataValueRest>> map = new TreeMap<>();
/**
* Gets the map.
*
* @return the map of keys to ordered values.
*/
@JsonAnyGetter
public SortedMap<String, List<MetadataValueRest>> getMap() {
return map;
}
/**
* Sets the metadata values for a given key.
*
* @param key the key.
* @param values the values. The values will be ordered according to their {@code place} value, if
* nonnegative. Values with a negative place (the default is -1) are assumed not to be explicitly
* ordered and will be placed after any explicitly ordered values, in the order they are passed
* to this method.
* @return this instance, to support chaining calls for easy initialization.
*/
public MetadataRest put(String key, MetadataValueRest... values) {
// determine highest explicitly ordered value
int highest = -1;
for (MetadataValueRest value : values) {
if (value.getPlace() > highest) {
highest = value.getPlace();
}
}
// add any non-explicitly ordered values after highest
for (MetadataValueRest value : values) {
if (value.getPlace() < 0) {
highest++;
value.setPlace(highest);
}
}
map.put(key, Arrays.asList(values));
return this;
}
@Override
public boolean equals(Object object) {
return object instanceof MetadataRest && ((MetadataRest) object).getMap().equals(map);
}
// equals compares only the backing map, so hashCode is derived from it as well
@Override
public int hashCode() {
return map.hashCode();
}
}
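For illustration, a minimal sketch of the ordering rule documented on put(String, MetadataValueRest...) above; the key and values are invented for the example:

    MetadataValueRest first = new MetadataValueRest("ordered explicitly");
    first.setPlace(0);                               // explicit nonnegative place is kept
    MetadataValueRest second = new MetadataValueRest("appended");
    // second.place is still -1, so put(...) assigns it place 1, directly after the
    // highest explicitly ordered value:
    MetadataRest metadata = new MetadataRest().put("dc.title", first, second);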

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.app.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.dspace.app.rest.RestResourceController;
/**
@@ -39,6 +40,7 @@ public class MetadataSchemaRest extends BaseObjectRest<Integer> {
}
@Override
@JsonProperty(access = JsonProperty.Access.READ_ONLY)
public String getType() {
return NAME;
}

View File

@@ -7,8 +7,10 @@
*/
package org.dspace.app.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty.Access;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.dspace.app.rest.converter.MetadataConverter;
/**
* An embeddable representation of the Metadata to use with DSpace REST
@@ -26,8 +28,25 @@ public class MetadataValueRest {
int confidence;
@JsonProperty(access = Access.READ_ONLY)
int place;
/**
* The order of this metadata value with respect to others in the same DSO with the same key.
*
* In the REST representation, all values of the same key are given as a JSON array that expresses
* their relative order, so there is no need to expose the exact numeric value publicly. The numeric
* value is only used at this level to ensure the intended order is respected when converting to/from JSON.
*
* @see MetadataConverter#convert(List)
* @see MetadataRest#put(String, MetadataValueRest...)
*/
@JsonIgnore
int place = -1;
public MetadataValueRest() {
}
public MetadataValueRest(String value) {
this.value = value;
}
public String getValue() {
return value;
@@ -68,5 +87,4 @@ public class MetadataValueRest {
public void setPlace(int place) {
this.place = place;
}
}
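A quick serialization sketch of the effect of @JsonIgnore here, using plain Jackson inside a method that declares the thrown JsonProcessingException (the exact field order in the output is not guaranteed; the shape shown is an assumption based on the annotations above):

    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(
            new MetadataRest().put("dc.title", new MetadataValueRest("A title")));
    // place never appears in the output; the position inside the array is the only
    // ordering signal, e.g. roughly:
    // {"dc.title":[{"value":"A title","language":null,"authority":null,"confidence":0}]}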

View File

@@ -45,4 +45,7 @@ public class JsonValueEvaluator implements LateObjectEvaluator {
}
}
public JsonNode getValueNode() {
return this.valueNode;
}
}

View File

@@ -14,11 +14,14 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.converter.BitstreamConverter;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.model.hateoas.BitstreamResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.service.BitstreamService;
@@ -39,16 +42,15 @@ import org.springframework.stereotype.Component;
*/
@Component(BitstreamRest.CATEGORY + "." + BitstreamRest.NAME)
public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest, UUID> {
public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstream, BitstreamRest> {
private final BitstreamService bs;
@Autowired
BitstreamService bs;
@Autowired
BitstreamConverter converter;
public BitstreamRestRepository() {
System.out.println("Repository initialized by Spring");
public BitstreamRestRepository(BitstreamService dsoService,
BitstreamConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<BitstreamRest>() { });
this.bs = dsoService;
}
@Override
@@ -70,7 +72,7 @@ public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest,
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
return converter.fromModel(bit);
return dsoConverter.fromModel(bit);
}
@Override
@@ -88,10 +90,17 @@ public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest,
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<BitstreamRest> page = new PageImpl<Bitstream>(bit, pageable, total).map(converter);
Page<BitstreamRest> page = new PageImpl<Bitstream>(bit, pageable, total).map(dsoConverter);
return page;
}
@Override
@PreAuthorize("hasPermission(#id, 'BITSTREAM', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override
public Class<BitstreamRest> getDomainClass() {
return BitstreamRest.class;

View File

@@ -7,23 +7,38 @@
*/
package org.dspace.app.rest.repository;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.BadRequestException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.hateoas.CollectionResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.app.rest.utils.CollectionRestEqualityUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.util.UUIDUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
@@ -39,20 +54,27 @@ import org.springframework.stereotype.Component;
*/
@Component(CollectionRest.CATEGORY + "." + CollectionRest.NAME)
public class CollectionRestRepository extends DSpaceRestRepository<CollectionRest, UUID> {
public class CollectionRestRepository extends DSpaceObjectRestRepository<Collection, CollectionRest> {
private final CollectionService cs;
@Autowired
CommunityService communityService;
@Autowired
CollectionService cs;
@Autowired
CollectionConverter converter;
@Autowired
MetadataConverter metadataConverter;
public CollectionRestRepository() {
System.out.println("Repository initialized by Spring");
@Autowired
CollectionRestEqualityUtils collectionRestEqualityUtils;
public CollectionRestRepository(CollectionService dsoService,
CollectionConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CollectionRest>() {});
this.cs = dsoService;
}
@Override
@@ -67,7 +89,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
if (collection == null) {
return null;
}
return converter.fromModel(collection);
return dsoConverter.fromModel(collection);
}
@Override
@@ -84,7 +106,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CollectionRest> page = new PageImpl<Collection>(collections, pageable, total).map(converter);
Page<CollectionRest> page = new PageImpl<Collection>(collections, pageable, total).map(dsoConverter);
return page;
}
@@ -108,7 +130,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CollectionRest> page = utils.getPage(collections, pageable).map(converter);
Page<CollectionRest> page = utils.getPage(collections, pageable).map(dsoConverter);
return page;
}
@@ -125,10 +147,17 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CollectionRest> page = utils.getPage(collections, pageable).map(converter);
Page<CollectionRest> page = utils.getPage(collections, pageable).map(dsoConverter);
return page;
}
@Override
@PreAuthorize("hasPermission(#id, 'COLLECTION', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override
public Class<CollectionRest> getDomainClass() {
return CollectionRest.class;
@@ -139,4 +168,97 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
return new CollectionResource(collection, utils, rels);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected CollectionRest createAndReturn(Context context) throws AuthorizeException {
HttpServletRequest req = getRequestService().getCurrentRequest().getHttpServletRequest();
ObjectMapper mapper = new ObjectMapper();
CollectionRest collectionRest;
try {
ServletInputStream input = req.getInputStream();
collectionRest = mapper.readValue(input, CollectionRest.class);
} catch (IOException e1) {
throw new UnprocessableEntityException("Error parsing request body: " + e1.toString());
}
Collection collection;
String parentCommunityString = req.getParameter("parent");
try {
Community parent = null;
if (StringUtils.isNotBlank(parentCommunityString)) {
UUID parentCommunityUuid = UUIDUtils.fromString(parentCommunityString);
if (parentCommunityUuid == null) {
throw new BadRequestException("The given parent was invalid: "
+ parentCommunityString);
}
parent = communityService.find(context, parentCommunityUuid);
if (parent == null) {
throw new UnprocessableEntityException("Parent community for id: "
+ parentCommunityUuid + " not found");
}
} else {
throw new BadRequestException("The parent parameter cannot be left empty," +
"collections require a parent community.");
}
collection = cs.create(context, parent);
cs.update(context, collection);
metadataConverter.setMetadata(context, collection, collectionRest.getMetadata());
} catch (SQLException e) {
throw new RuntimeException("Unable to create new Collection under parent Community " +
parentCommunityString, e);
}
return dsoConverter.convert(collection);
}
@Override
@PreAuthorize("hasPermission(#id, 'COLLECTION', 'WRITE')")
protected CollectionRest put(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
JsonNode jsonNode)
throws RepositoryMethodNotImplementedException, SQLException, AuthorizeException {
CollectionRest collectionRest;
try {
collectionRest = new ObjectMapper().readValue(jsonNode.toString(), CollectionRest.class);
} catch (IOException e) {
throw new UnprocessableEntityException("Error parsing collection json: " + e.getMessage());
}
Collection collection = cs.find(context, id);
if (collection == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + id + " not found");
}
CollectionRest originalCollectionRest = dsoConverter.fromModel(collection);
if (collectionRestEqualityUtils.isCollectionRestEqualWithoutMetadata(originalCollectionRest, collectionRest)) {
metadataConverter.setMetadata(context, collection, collectionRest.getMetadata());
} else {
throw new IllegalArgumentException("The UUID in the Json and the UUID in the url do not match: "
+ id + ", "
+ collectionRest.getId());
}
return dsoConverter.fromModel(collection);
}
@Override
@PreAuthorize("hasPermission(#id, 'COLLECTION', 'DELETE')")
protected void delete(Context context, UUID id) throws AuthorizeException {
Collection collection = null;
try {
collection = cs.find(context, id);
if (collection == null) {
throw new ResourceNotFoundException(
CollectionRest.CATEGORY + "." + CollectionRest.NAME + " with id: " + id + " not found");
}
} catch (SQLException e) {
throw new RuntimeException("Unable to find Collection with id = " + id, e);
}
try {
cs.delete(context, collection);
} catch (SQLException e) {
throw new RuntimeException("Unable to delete Collection with id = " + id, e);
} catch (IOException e) {
throw new RuntimeException("Unable to delete collection because the logo couldn't be deleted", e);
}
}
}

View File

@@ -12,22 +12,29 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.BadRequestException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.CommunityConverter;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.hateoas.CommunityResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.app.rest.utils.CommunityRestEqualityUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.service.CommunityService;
import org.dspace.core.Context;
import org.dspace.util.UUIDUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
@@ -43,16 +50,23 @@ import org.springframework.stereotype.Component;
*/
@Component(CommunityRest.CATEGORY + "." + CommunityRest.NAME)
public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest, UUID> {
public class CommunityRestRepository extends DSpaceObjectRestRepository<Community, CommunityRest> {
@Autowired
CommunityService cs;
private final CommunityService cs;
@Autowired
CommunityConverter converter;
public CommunityRestRepository() {
System.out.println("Repository initialized by Spring");
@Autowired
MetadataConverter metadataConverter;
@Autowired
CommunityRestEqualityUtils communityRestEqualityUtils;
public CommunityRestRepository(CommunityService dsoService,
CommunityConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CommunityRest>() {});
this.cs = dsoService;
}
@Override
@@ -60,7 +74,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
protected CommunityRest createAndReturn(Context context) throws AuthorizeException {
HttpServletRequest req = getRequestService().getCurrentRequest().getHttpServletRequest();
ObjectMapper mapper = new ObjectMapper();
CommunityRest communityRest = null;
CommunityRest communityRest;
try {
ServletInputStream input = req.getInputStream();
communityRest = mapper.readValue(input, CommunityRest.class);
@@ -68,23 +82,34 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
throw new UnprocessableEntityException("Error parsing request body: " + e1.toString());
}
Community community = null;
Community community;
try {
community = cs.create(null, context);
Community parent = null;
String parentCommunityString = req.getParameter("parent");
if (StringUtils.isNotBlank(parentCommunityString)) {
UUID parentCommunityUuid = UUIDUtils.fromString(parentCommunityString);
if (parentCommunityUuid == null) {
throw new BadRequestException("The given parent parameter was invalid: "
+ parentCommunityString);
}
parent = cs.find(context, parentCommunityUuid);
if (parent == null) {
throw new UnprocessableEntityException("Parent community for id: "
+ parentCommunityUuid + " not found");
}
}
community = cs.create(parent, context);
cs.update(context, community);
if (communityRest.getMetadata() != null) {
for (MetadataEntryRest mer : communityRest.getMetadata()) {
String[] metadatakey = mer.getKey().split("\\.");
cs.addMetadata(context, community, metadatakey[0], metadatakey[1],
metadatakey.length == 3 ? metadatakey[2] : null, mer.getLanguage(), mer.getValue());
}
}
metadataConverter.setMetadata(context, community, communityRest.getMetadata());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
return converter.convert(community);
return dsoConverter.convert(community);
}
@Override
@@ -99,7 +124,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
if (community == null) {
return null;
}
return converter.fromModel(community);
return dsoConverter.fromModel(community);
}
@Override
@@ -116,7 +141,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CommunityRest> page = new PageImpl<Community>(communities, pageable, total).map(converter);
Page<CommunityRest> page = new PageImpl<Community>(communities, pageable, total).map(dsoConverter);
return page;
}
@@ -130,7 +155,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CommunityRest> page = utils.getPage(topCommunities, pageable).map(converter);
Page<CommunityRest> page = utils.getPage(topCommunities, pageable).map(dsoConverter);
return page;
}
@@ -151,10 +176,17 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<CommunityRest> page = utils.getPage(subCommunities, pageable).map(converter);
Page<CommunityRest> page = utils.getPage(subCommunities, pageable).map(dsoConverter);
return page;
}
@Override
@PreAuthorize("hasPermission(#id, 'COMMUNITY', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override
public Class<CommunityRest> getDomainClass() {
return CommunityRest.class;
@@ -165,4 +197,49 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
return new CommunityResource(community, utils, rels);
}
@Override
@PreAuthorize("hasPermission(#id, 'COMMUNITY', 'WRITE')")
protected CommunityRest put(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
JsonNode jsonNode)
throws RepositoryMethodNotImplementedException, SQLException, AuthorizeException {
CommunityRest communityRest;
try {
communityRest = new ObjectMapper().readValue(jsonNode.toString(), CommunityRest.class);
} catch (IOException e) {
throw new UnprocessableEntityException("Error parsing community json: " + e.getMessage());
}
Community community = cs.find(context, id);
if (community == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + id + " not found");
}
CommunityRest originalCommunityRest = dsoConverter.fromModel(community);
if (communityRestEqualityUtils.isCommunityRestEqualWithoutMetadata(originalCommunityRest, communityRest)) {
metadataConverter.setMetadata(context, community, communityRest.getMetadata());
} else {
throw new UnprocessableEntityException("The given JSON and the original Community differ more " +
"than just the metadata");
}
return dsoConverter.fromModel(community);
}
@Override
@PreAuthorize("hasPermission(#id, 'COMMUNITY', 'DELETE')")
protected void delete(Context context, UUID id) throws AuthorizeException {
Community community = null;
try {
community = cs.find(context, id);
if (community == null) {
throw new ResourceNotFoundException(
CommunityRest.CATEGORY + "." + CommunityRest.NAME + " with id: " + id + " not found");
}
} catch (SQLException e) {
throw new RuntimeException("Unable to find Community with id = " + id, e);
}
try {
cs.delete(context, community);
} catch (SQLException e) {
throw new RuntimeException("Unable to delete Community with id = " + id, e);
} catch (IOException e) {
throw new RuntimeException("Unable to delete community because the logo couldn't be deleted", e);
}
}
}

View File

@@ -0,0 +1,86 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.rest.converter.DSpaceObjectConverter;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.DSpaceObjectRest;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.DSpaceObjectService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
/**
* Base class for DSpaceObject-based Rest Repositories, providing common functionality.
*
* @param <M> the specific type of DSpaceObject.
* @param <R> the corresponding DSpaceObjectRest.
*/
public abstract class DSpaceObjectRestRepository<M extends DSpaceObject, R extends DSpaceObjectRest>
extends DSpaceRestRepository<R, UUID> {
final DSpaceObjectService<M> dsoService;
final DSpaceObjectPatch<R> dsoPatch;
final DSpaceObjectConverter<M, R> dsoConverter;
@Autowired
MetadataConverter metadataConverter;
DSpaceObjectRestRepository(DSpaceObjectService<M> dsoService,
DSpaceObjectConverter<M, R> dsoConverter,
DSpaceObjectPatch<R> dsoPatch) {
this.dsoService = dsoService;
this.dsoPatch = dsoPatch;
this.dsoConverter = dsoConverter;
}
/**
* Updates the DSpaceObject according to the given Patch.
*
* @param apiCategory the api category.
* @param model the api model.
* @param id the id of the DSpaceObject.
* @param patch the patch to apply.
* @throws AuthorizeException if the action is unauthorized.
* @throws ResourceNotFoundException if the DSpace object was not found.
* @throws SQLException if a database error occurs.
* @throws UnprocessableEntityException if the patch attempts to modify an unmodifiable attribute of the object.
*/
protected void patchDSpaceObject(String apiCategory, String model, UUID id, Patch patch)
throws AuthorizeException, ResourceNotFoundException, SQLException, UnprocessableEntityException {
M dso = dsoService.find(obtainContext(), id);
if (dso == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + id + " not found");
}
R dsoRest = dsoPatch.patch(findOne(id), patch.getOperations());
updateDSpaceObject(dso, dsoRest);
}
/**
* Applies the changes in the given rest DSpace object to the model DSpace object.
* The default implementation updates metadata if needed. Subclasses should extend
* to support updates of additional properties.
*
* @param dso the dso to apply changes to.
* @param dsoRest the rest representation of the new desired state.
*/
protected void updateDSpaceObject(M dso, R dsoRest)
throws AuthorizeException, SQLException {
R origDsoRest = dsoConverter.fromModel(dso);
if (!origDsoRest.getMetadata().equals(dsoRest.getMetadata())) {
metadataConverter.setMetadata(obtainContext(), dso, dsoRest.getMetadata());
}
}
}
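To make the wiring concrete, a hypothetical minimal subclass sketch (the Widget* names are invented for illustration and are not part of this changeset):

    @Component(WidgetRest.CATEGORY + "." + WidgetRest.NAME)
    public class WidgetRestRepository extends DSpaceObjectRestRepository<Widget, WidgetRest> {
        public WidgetRestRepository(WidgetService dsoService, WidgetConverter dsoConverter) {
            // an anonymous DSpaceObjectPatch is enough when only metadata is patchable
            super(dsoService, dsoConverter, new DSpaceObjectPatch<WidgetRest>() { });
        }
        @Override
        @PreAuthorize("hasPermission(#id, 'WIDGET', 'WRITE')")
        protected void patch(Context context, HttpServletRequest request, String apiCategory,
                             String model, UUID id, Patch patch) throws AuthorizeException, SQLException {
            patchDSpaceObject(apiCategory, model, id, patch);
        }
    }

patchDSpaceObject then loads the object, applies the patch operations to its current rest representation, and hands the result to updateDSpaceObject, which subclasses can override (as EPersonRestRepository and ItemRestRepository do elsewhere in this changeset) to persist properties beyond metadata.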

View File

@@ -27,6 +27,7 @@ import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.util.DCInputsReaderException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
@@ -56,6 +57,9 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
@Autowired
private DSpaceRestRepository<T, ID> thisRepository;
@Autowired
private MetadataFieldService metadataFieldService;
@Override
public <S extends T> S save(S entity) {
Context context = null;
@@ -451,24 +455,28 @@ public abstract class DSpaceRestRepository<T extends RestAddressableModel, ID ex
}
/**
* Apply an update to the REST object via JSON PUT.
* This method fully replaces the REST object identified by the given UUID with the REST object
* described by the JsonNode parameter.
*
* @param request the http request
* @param apiCategory the API category e.g. "api"
* @param model the DSpace model e.g. "metadatafield"
* @param id the ID of the target REST object
* @param uuid the ID of the target REST object
* @param jsonNode the part of the request body representing the updated rest object
* @return the updated REST object
*/
public T put(HttpServletRequest request, String apiCategory, String model, ID id, JsonNode jsonNode) {
public T put(HttpServletRequest request, String apiCategory, String model, ID uuid, JsonNode jsonNode) {
Context context = obtainContext();
try {
thisRepository.put(context, request, apiCategory, model, id, jsonNode);
thisRepository.put(context, request, apiCategory, model, uuid, jsonNode);
context.commit();
} catch (SQLException | AuthorizeException e) {
throw new RuntimeException(e.getMessage(), e);
} catch (SQLException e) {
throw new RuntimeException("Unable to update DSpace object " + model + " with id=" + uuid, e);
} catch (AuthorizeException e) {
throw new RuntimeException("Unable to perform PUT request as the " +
"current user does not have sufficient rights", e);
}
return findOne(id);
return findOne(uuid);
}
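A hedged caller-side sketch of this contract (the repository reference, the request, and the category/model strings are assumptions for the example; readTree may throw, so this belongs in a method that declares the exception):

    JsonNode body = new ObjectMapper()
            .readTree("{\"id\":\"" + uuid + "\",\"name\":\"New name\"}");
    // delegates to the subclass put(...), commits the context, then re-reads the object:
    ItemRest updated = itemRestRepository.put(request, "core", "item", uuid, body);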
/**

View File

@@ -12,7 +12,6 @@ import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -20,26 +19,22 @@ import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.EPersonConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.RESTAuthorizationException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.EPersonRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.hateoas.EPersonResource;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.EPersonPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
@@ -51,18 +46,26 @@ import org.springframework.stereotype.Component;
*/
@Component(EPersonRest.CATEGORY + "." + EPersonRest.NAME)
public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUID> {
EPersonService es = EPersonServiceFactory.getInstance().getEPersonService();
public class EPersonRestRepository extends DSpaceObjectRestRepository<EPerson, EPersonRest> {
@Autowired
AuthorizeService authorizeService;
private final EPersonService es;
@Autowired
EPersonConverter converter;
MetadataConverter metadataConverter;
@Autowired
EPersonPatch epersonPatch;
public EPersonRestRepository(EPersonService dsoService,
EPersonConverter dsoConverter,
EPersonPatch dsoPatch) {
super(dsoService, dsoConverter, dsoPatch);
this.es = dsoService;
}
@Override
protected EPersonRest createAndReturn(Context context)
throws AuthorizeException {
@@ -89,18 +92,12 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
es.setPassword(eperson, epersonRest.getPassword());
}
es.update(context, eperson);
if (epersonRest.getMetadata() != null) {
for (MetadataEntryRest mer : epersonRest.getMetadata()) {
String[] metadatakey = mer.getKey().split("\\.");
es.addMetadata(context, eperson, metadatakey[0], metadatakey[1],
metadatakey.length == 3 ? metadatakey[2] : null, mer.getLanguage(), mer.getValue());
}
}
metadataConverter.setMetadata(context, eperson, epersonRest.getMetadata());
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
return converter.convert(eperson);
return dsoConverter.convert(eperson);
}
@Override
@@ -115,7 +112,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
if (eperson == null) {
return null;
}
return converter.fromModel(eperson);
return dsoConverter.fromModel(eperson);
}
@Override
@@ -133,7 +130,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(converter);
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(dsoConverter);
return page;
}
@@ -159,7 +156,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(converter);
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(dsoConverter);
return page;
}
@@ -185,42 +182,22 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
if (eperson == null) {
return null;
}
return converter.fromModel(eperson);
return dsoConverter.fromModel(eperson);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
public void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
Patch patch)
throws UnprocessableEntityException, PatchBadRequestException, AuthorizeException,
ResourceNotFoundException {
try {
EPerson eperson = es.find(context, uuid);
if (eperson == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + uuid + " not found");
}
List<Operation> operations = patch.getOperations();
EPersonRest ePersonRest = findOne(context, uuid);
EPersonRest patchedModel = (EPersonRest) epersonPatch.patch(ePersonRest, operations);
updatePatchedValues(context, patchedModel, eperson);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, uuid, patch);
}
/**
* Applies changes in the rest model.
* @param context
* @param ePersonRest the updated eperson rest
* @param ePerson the eperson content object
* @throws SQLException
* @throws AuthorizeException
*/
private void updatePatchedValues(Context context, EPersonRest ePersonRest, EPerson ePerson)
throws SQLException, AuthorizeException {
@Override
protected void updateDSpaceObject(EPerson ePerson, EPersonRest ePersonRest)
throws AuthorizeException, SQLException {
super.updateDSpaceObject(ePerson, ePersonRest);
Context context = obtainContext();
if (ePersonRest.getPassword() != null) {
es.setPassword(ePerson, ePersonRest.getPassword());
}
@@ -235,7 +212,6 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
}
es.update(context, ePerson);
}
@Override

View File

@@ -16,11 +16,13 @@ import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.converter.GroupConverter;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.GroupRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.hateoas.GroupResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
@@ -39,12 +41,19 @@ import org.springframework.stereotype.Component;
*/
@Component(GroupRest.CATEGORY + "." + GroupRest.NAME)
public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
public class GroupRestRepository extends DSpaceObjectRestRepository<Group, GroupRest> {
@Autowired
GroupService gs;
@Autowired
GroupConverter converter;
GroupRestRepository(GroupService dsoService,
GroupConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<GroupRest>() {});
this.gs = dsoService;
}
@Autowired
MetadataConverter metadataConverter;
@Override
@PreAuthorize("hasAuthority('ADMIN')")
@@ -65,19 +74,12 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
group = gs.create(context);
gs.setName(group, groupRest.getName());
gs.update(context, group);
if (groupRest.getMetadata() != null) {
for (MetadataEntryRest mer: groupRest.getMetadata()) {
String[] metadatakey = mer.getKey().split("\\.");
gs.addMetadata(context, group, metadatakey[0], metadatakey[1],
metadatakey.length == 3 ? metadatakey[2] : null, mer.getLanguage(), mer.getValue());
}
}
metadataConverter.setMetadata(context, group, groupRest.getMetadata());
} catch (SQLException excSQL) {
throw new RuntimeException(excSQL.getMessage(), excSQL);
}
return converter.convert(group);
return dsoConverter.convert(group);
}
@Override
@@ -92,7 +94,7 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
if (group == null) {
return null;
}
return converter.fromModel(group);
return dsoConverter.fromModel(group);
}
@PreAuthorize("hasAuthority('ADMIN')")
@@ -106,10 +108,17 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<GroupRest> page = new PageImpl<Group>(groups, pageable, total).map(converter);
Page<GroupRest> page = new PageImpl<Group>(groups, pageable, total).map(dsoConverter);
return page;
}
@Override
@PreAuthorize("hasPermission(#id, 'GROUP', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override
public Class<GroupRest> getDomainClass() {
return GroupRest.class;

View File

@@ -13,21 +13,32 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.BadRequestException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.hateoas.ItemResource;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.ItemPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.InstallItemService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.core.Context;
import org.dspace.util.UUIDUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
@@ -43,22 +54,35 @@ import org.springframework.stereotype.Component;
*/
@Component(ItemRest.CATEGORY + "." + ItemRest.NAME)
public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
public class ItemRestRepository extends DSpaceObjectRestRepository<Item, ItemRest> {
private static final Logger log = Logger.getLogger(ItemRestRepository.class);
@Autowired
ItemService is;
private final ItemService is;
@Autowired
ItemConverter converter;
MetadataConverter metadataConverter;
@Autowired
ItemPatch itemPatch;
@Autowired
WorkspaceItemService workspaceItemService;
public ItemRestRepository() {
System.out.println("Repository initialized by Spring");
@Autowired
CollectionService collectionService;
@Autowired
InstallItemService installItemService;
public ItemRestRepository(ItemService dsoService,
ItemConverter dsoConverter,
ItemPatch dsoPatch) {
super(dsoService, dsoConverter, dsoPatch);
this.is = dsoService;
}
@Override
@@ -73,7 +97,7 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
if (item == null) {
return null;
}
return converter.fromModel(item);
return dsoConverter.fromModel(item);
}
@Override
@@ -92,41 +116,23 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<ItemRest> page = new PageImpl<Item>(items, pageable, total).map(converter);
Page<ItemRest> page = new PageImpl<Item>(items, pageable, total).map(dsoConverter);
return page;
}
@Override
public void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
Patch patch)
throws UnprocessableEntityException, PatchBadRequestException, SQLException, AuthorizeException,
ResourceNotFoundException {
Item item = is.find(context, uuid);
if (item == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + uuid + " not found");
@PreAuthorize("hasPermission(#id, 'ITEM', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
List<Operation> operations = patch.getOperations();
ItemRest itemRest = findOne(uuid);
@Override
protected void updateDSpaceObject(Item item, ItemRest itemRest)
throws AuthorizeException, SQLException {
super.updateDSpaceObject(item, itemRest);
ItemRest patchedModel = (ItemRest) itemPatch.patch(itemRest, operations);
updatePatchedValues(context, patchedModel, item);
}
/**
* Persists changes to the rest model.
* @param context
* @param itemRest the updated item rest resource
* @param item the item content object
* @throws SQLException
* @throws AuthorizeException
*/
private void updatePatchedValues(Context context, ItemRest itemRest, Item item)
throws SQLException, AuthorizeException {
try {
Context context = obtainContext();
if (itemRest.getWithdrawn() != item.isWithdrawn()) {
if (itemRest.getWithdrawn()) {
is.withdraw(context, item);
@@ -138,10 +144,6 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
item.setDiscoverable(itemRest.getDiscoverable());
is.update(context, item);
}
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
throw e;
}
}
@Override
@@ -155,10 +157,15 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected void delete(Context context, UUID id) throws AuthorizeException {
Item item = null;
try {
item = is.find(context, id);
if (item == null) {
throw new ResourceNotFoundException(ItemRest.CATEGORY + "." + ItemRest.NAME +
" with id: " + id + " not found");
}
if (is.isInProgressSubmission(context, item)) {
throw new UnprocessableEntityException("The item cannot be deleted. "
+ "It's part of a in-progress submission.");
@@ -177,4 +184,68 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
}
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected ItemRest createAndReturn(Context context) throws AuthorizeException, SQLException {
HttpServletRequest req = getRequestService().getCurrentRequest().getHttpServletRequest();
String owningCollectionUuidString = req.getParameter("owningCollection");
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest;
try {
ServletInputStream input = req.getInputStream();
itemRest = mapper.readValue(input, ItemRest.class);
} catch (IOException e1) {
throw new UnprocessableEntityException("Error parsing request body", e1);
}
if (!itemRest.getInArchive()) {
throw new BadRequestException("The inArchive attribute cannot be set to false when creating an Item");
}
UUID owningCollectionUuid = UUIDUtils.fromString(owningCollectionUuidString);
Collection collection = collectionService.find(context, owningCollectionUuid);
if (collection == null) {
throw new BadRequestException("The given owningCollection parameter is invalid: "
+ owningCollectionUuid);
}
WorkspaceItem workspaceItem = workspaceItemService.create(context, collection, false);
Item item = workspaceItem.getItem();
item.setArchived(true);
item.setOwningCollection(collection);
item.setDiscoverable(itemRest.getDiscoverable());
item.setLastModified(itemRest.getLastModified());
metadataConverter.setMetadata(context, item, itemRest.getMetadata());
Item itemToReturn = installItemService.installItem(context, workspaceItem);
return dsoConverter.fromModel(itemToReturn);
}
@Override
@PreAuthorize("hasPermission(#id, 'ITEM', 'WRITE')")
protected ItemRest put(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
JsonNode jsonNode)
throws RepositoryMethodNotImplementedException, SQLException, AuthorizeException {
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest;
try {
itemRest = mapper.readValue(jsonNode.toString(), ItemRest.class);
} catch (IOException e1) {
throw new UnprocessableEntityException("Error parsing request body", e1);
}
Item item = is.find(context, uuid);
if (item == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + uuid + " not found");
}
if (StringUtils.equals(uuid.toString(), itemRest.getId())) {
metadataConverter.setMetadata(context, item, itemRest.getMetadata());
} else {
throw new IllegalArgumentException("The UUID in the Json and the UUID in the url do not match: "
+ uuid + ", "
+ itemRest.getId());
}
return dsoConverter.fromModel(item);
}
}

View File

@@ -7,22 +7,37 @@
*/
package org.dspace.app.rest.repository;
import static java.lang.Integer.parseInt;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.MetadataFieldConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.MetadataFieldRest;
import org.dspace.app.rest.model.hateoas.MetadataFieldResource;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
@@ -34,7 +49,7 @@ import org.springframework.stereotype.Component;
public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFieldRest, Integer> {
@Autowired
MetadataFieldService metaFieldService;
MetadataFieldService metadataFieldService;
@Autowired
MetadataSchemaService metadataSchemaService;
@@ -49,7 +64,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
public MetadataFieldRest findOne(Context context, Integer id) {
MetadataField metadataField = null;
try {
metadataField = metaFieldService.find(context, id);
metadataField = metadataFieldService.find(context, id);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
@@ -63,7 +78,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
public Page<MetadataFieldRest> findAll(Context context, Pageable pageable) {
List<MetadataField> metadataField = null;
try {
metadataField = metaFieldService.findAll(context);
metadataField = metadataFieldService.findAll(context);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
@@ -81,7 +96,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
if (schema == null) {
return null;
}
metadataFields = metaFieldService.findAllInSchema(context, schema);
metadataFields = metadataFieldService.findAllInSchema(context, schema);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
@@ -98,4 +113,112 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository<MetadataFi
public MetadataFieldResource wrapResource(MetadataFieldRest bs, String... rels) {
return new MetadataFieldResource(bs, utils, rels);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected MetadataFieldRest createAndReturn(Context context)
throws AuthorizeException, SQLException {
// parse request body
MetadataFieldRest metadataFieldRest;
try {
metadataFieldRest = new ObjectMapper().readValue(
getRequestService().getCurrentRequest().getHttpServletRequest().getInputStream(),
MetadataFieldRest.class
);
} catch (IOException excIO) {
throw new PatchBadRequestException("error parsing request body", excIO);
}
// validate fields
String schemaId = getRequestService().getCurrentRequest().getHttpServletRequest().getParameter("schemaId");
if (isBlank(schemaId)) {
throw new UnprocessableEntityException("metadata schema ID cannot be blank");
}
MetadataSchema schema = metadataSchemaService.find(context, parseInt(schemaId));
if (schema == null) {
throw new UnprocessableEntityException("metadata schema with ID " + schemaId + " not found");
}
if (isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
}
// create
MetadataField metadataField;
try {
metadataField = metadataFieldService.create(context, schema,
metadataFieldRest.getElement(), metadataFieldRest.getQualifier(), metadataFieldRest.getScopeNote());
metadataFieldService.update(context, metadataField);
} catch (NonUniqueMetadataException e) {
throw new UnprocessableEntityException(
"metadata field "
+ schema.getName() + "." + metadataFieldRest.getElement()
+ (metadataFieldRest.getQualifier() != null ? "." + metadataFieldRest.getQualifier() : "")
+ " already exists"
);
} catch (IOException e) {
throw new RuntimeException(e);
}
// return
return converter.convert(metadataField);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected void delete(Context context, Integer id) throws AuthorizeException {
try {
MetadataField metadataField = metadataFieldService.find(context, id);
if (metadataField == null) {
throw new ResourceNotFoundException("metadata field with id: " + id + " not found");
}
metadataFieldService.delete(context, metadataField);
} catch (SQLException e) {
throw new RuntimeException("error while trying to delete " + MetadataFieldRest.NAME + " with id: " + id, e);
}
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected MetadataFieldRest put(Context context, HttpServletRequest request, String apiCategory, String model,
Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {
MetadataFieldRest metadataFieldRest = new Gson().fromJson(jsonNode.toString(), MetadataFieldRest.class);
if (isBlank(metadataFieldRest.getElement())) {
throw new UnprocessableEntityException("metadata element (in request body) cannot be blank");
}
if (!Objects.equals(id, metadataFieldRest.getId())) {
throw new UnprocessableEntityException("ID in request body doesn't match path ID");
}
MetadataField metadataField = metadataFieldService.find(context, id);
if (metadataField == null) {
throw new ResourceNotFoundException("metadata field with id: " + id + " not found");
}
metadataField.setElement(metadataFieldRest.getElement());
metadataField.setQualifier(metadataFieldRest.getQualifier());
metadataField.setScopeNote(metadataFieldRest.getScopeNote());
try {
metadataFieldService.update(context, metadataField);
context.commit();
} catch (NonUniqueMetadataException e) {
throw new UnprocessableEntityException("metadata field "
+ metadataField.getMetadataSchema().getName() + "." + metadataFieldRest.getElement()
+ (metadataFieldRest.getQualifier() != null ? "." + metadataFieldRest.getQualifier() : "")
+ " already exists");
} catch (IOException e) {
throw new RuntimeException(e);
}
return converter.fromModel(metadataField);
}
}

View File

@@ -7,18 +7,32 @@
*/
package org.dspace.app.rest.repository;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.Objects;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import org.dspace.app.rest.converter.MetadataSchemaConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.MetadataSchemaRest;
import org.dspace.app.rest.model.hateoas.MetadataSchemaResource;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataSchema;
import org.dspace.content.NonUniqueMetadataException;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
@@ -30,7 +44,7 @@ import org.springframework.stereotype.Component;
public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataSchemaRest, Integer> {
@Autowired
MetadataSchemaService metaScemaService;
MetadataSchemaService metadataSchemaService;
@Autowired
MetadataSchemaConverter converter;
@@ -42,7 +56,7 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
public MetadataSchemaRest findOne(Context context, Integer id) {
MetadataSchema metadataSchema = null;
try {
metadataSchema = metaScemaService.find(context, id);
metadataSchema = metadataSchemaService.find(context, id);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
@@ -56,7 +70,7 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
public Page<MetadataSchemaRest> findAll(Context context, Pageable pageable) {
List<MetadataSchema> metadataSchema = null;
try {
metadataSchema = metaScemaService.findAll(context);
metadataSchema = metadataSchemaService.findAll(context);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
@@ -73,4 +87,100 @@ public class MetadataSchemaRestRepository extends DSpaceRestRepository<MetadataS
public MetadataSchemaResource wrapResource(MetadataSchemaRest bs, String... rels) {
return new MetadataSchemaResource(bs, utils, rels);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected MetadataSchemaRest createAndReturn(Context context)
throws AuthorizeException, SQLException {
// parse request body
MetadataSchemaRest metadataSchemaRest;
try {
metadataSchemaRest = new ObjectMapper().readValue(
getRequestService().getCurrentRequest().getHttpServletRequest().getInputStream(),
MetadataSchemaRest.class
);
} catch (IOException excIO) {
throw new PatchBadRequestException("error parsing request body", excIO);
}
// validate fields
if (isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank");
}
if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
}
// create
MetadataSchema metadataSchema;
try {
metadataSchema = metadataSchemaService.create(
context, metadataSchemaRest.getPrefix(), metadataSchemaRest.getNamespace()
);
metadataSchemaService.update(context, metadataSchema);
} catch (NonUniqueMetadataException e) {
throw new UnprocessableEntityException("metadata schema "
+ metadataSchemaRest.getPrefix() + "." + metadataSchemaRest.getNamespace() + " already exists");
}
// return
return converter.convert(metadataSchema);
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected void delete(Context context, Integer id) throws AuthorizeException {
try {
MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
metadataSchemaService.delete(context, metadataSchema);
} catch (SQLException e) {
throw new RuntimeException(
"error while trying to delete " + MetadataSchemaRest.NAME + " with id: " + id, e
);
}
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected MetadataSchemaRest put(Context context, HttpServletRequest request, String apiCategory, String model,
Integer id, JsonNode jsonNode) throws SQLException, AuthorizeException {
MetadataSchemaRest metadataSchemaRest = new Gson().fromJson(jsonNode.toString(), MetadataSchemaRest.class);
if (isBlank(metadataSchemaRest.getPrefix())) {
throw new UnprocessableEntityException("metadata schema name cannot be blank");
}
if (isBlank(metadataSchemaRest.getNamespace())) {
throw new UnprocessableEntityException("metadata schema namespace cannot be blank");
}
if (!Objects.equals(id, metadataSchemaRest.getId())) {
throw new UnprocessableEntityException("ID in request doesn't match path ID");
}
MetadataSchema metadataSchema = metadataSchemaService.find(context, id);
if (metadataSchema == null) {
throw new ResourceNotFoundException("metadata schema with id: " + id + " not found");
}
metadataSchema.setName(metadataSchemaRest.getPrefix());
metadataSchema.setNamespace(metadataSchemaRest.getNamespace());
try {
metadataSchemaService.update(context, metadataSchema);
context.commit();
} catch (NonUniqueMetadataException e) {
throw new UnprocessableEntityException("metadata schema "
+ metadataSchemaRest.getPrefix() + "." + metadataSchemaRest.getNamespace() + " already exists");
}
return converter.fromModel(metadataSchema);
}
}
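For reference, the create flow above accepts a MetadataSchemaRest body carrying a prefix and a namespace. A minimal MockMvc sketch of that request, in the style of the integration tests later in this changeset (the /api/core/metadataschemas path and the REST-model setters are assumptions, not shown in this diff):
// Illustrative sketch only; endpoint path and setters are assumed, not part of this commit.
ObjectMapper mapper = new ObjectMapper();
MetadataSchemaRest schemaRest = new MetadataSchemaRest();
schemaRest.setPrefix("test");                               // becomes the schema name; must be non-blank
schemaRest.setNamespace("http://example.com/schema/test");  // must be non-blank
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(post("/api/core/metadataschemas")
        .content(mapper.writeValueAsBytes(schemaRest))
        .contentType(contentType))
        .andExpect(status().isCreated());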

View File

@@ -11,10 +11,14 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.converter.SiteConverter;
import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.model.hateoas.SiteResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Site;
import org.dspace.content.service.SiteService;
import org.dspace.core.Context;
@@ -22,6 +26,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
/**
@@ -31,16 +36,15 @@ import org.springframework.stereotype.Component;
*/
@Component(SiteRest.CATEGORY + "." + SiteRest.NAME)
public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> {
public class SiteRestRepository extends DSpaceObjectRestRepository<Site, SiteRest> {
private final SiteService sitesv;
@Autowired
SiteService sitesv;
@Autowired
SiteConverter converter;
public SiteRestRepository() {
public SiteRestRepository(SiteService dsoService,
SiteConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<SiteRest>() {});
this.sitesv = dsoService;
}
@Override
@@ -54,7 +58,7 @@ public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> {
if (site == null) {
return null;
}
return converter.fromModel(site);
return dsoConverter.fromModel(site);
}
@Override
@@ -66,10 +70,17 @@ public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> {
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
Page<SiteRest> page = new PageImpl<Site>(sites, pageable, total).map(converter);
Page<SiteRest> page = new PageImpl<Site>(sites, pageable, total).map(dsoConverter);
return page;
}
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override
public Class<SiteRest> getDomainClass() {
return SiteRest.class;

View File

@@ -30,7 +30,7 @@ public abstract class AbstractResourcePatch<R extends RestModel> {
* @throws UnprocessableEntityException
* @throws PatchBadRequestException
*/
public RestModel patch(R restModel, List<Operation> operations) {
public R patch(R restModel, List<Operation> operations) {
// Note: the list of possible operations is taken from the JsonPatchConverter class. The "test"
// operation (https://tools.ietf.org/html/rfc6902#section-4.6) is not implemented.

View File

@@ -0,0 +1,81 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository.patch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.flipkart.zjsonpatch.JsonPatch;
import org.dspace.app.rest.converter.JsonPatchConverter;
import org.dspace.app.rest.model.DSpaceObjectRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch;
/**
* Base class for DSpaceObject-based PATCH operations, providing common functionality.
*
* @param <R> the type of DSpaceObjectRest object the class is applicable to.
*/
public abstract class DSpaceObjectPatch<R extends DSpaceObjectRest> extends AbstractResourcePatch<R> {
private static final String METADATA_PATH = "/metadata";
private ObjectMapper objectMapper = new ObjectMapper();
private JsonPatchConverter jsonPatchConverter = new JsonPatchConverter(objectMapper);
/**
* Applies the given patch operations to the given DSpaceObjectRest instance.
*
* This extends the default implementation by first applying metadata-based patch operations,
* then applying any others.
*
* @param dsoRest the instance to apply the changes to.
* @param operations the list of patch operations.
* @return the modified DSpaceObjectRest instance.
*/
@Override
public R patch(R dsoRest, List<Operation> operations) {
List<Operation> metadataOperations = new ArrayList<>();
List<Operation> otherOperations = new ArrayList<>();
for (Operation operation : operations) {
String path = operation.getPath();
if (path.equals(METADATA_PATH) || path.startsWith(METADATA_PATH + "/")) {
metadataOperations.add(operation);
} else {
otherOperations.add(operation);
}
}
if (!metadataOperations.isEmpty()) {
dsoRest.setMetadata(applyMetadataPatch(
jsonPatchConverter.convert(new Patch(metadataOperations)),
dsoRest.getMetadata()));
}
return super.patch(dsoRest, otherOperations);
}
private MetadataRest applyMetadataPatch(JsonNode patch, MetadataRest metadataRest) {
try {
ObjectNode objectNode = objectMapper.createObjectNode();
JsonNode metadataNode = objectMapper.valueToTree(metadataRest);
objectNode.replace("metadata", metadataNode);
JsonPatch.applyInPlace(patch, objectNode);
return objectMapper.treeToValue(objectNode.get("metadata"), MetadataRest.class);
} catch (IOException e) {
throw new IllegalArgumentException(e);
}
}
}
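To make the metadata routing above concrete, here is a standalone sketch of the same zjsonpatch mechanics used by applyMetadataPatch; the RFC 6902 payload and the /metadata/dc.title/0/value path shape are illustrative assumptions, not part of this commit:
// Standalone illustration of the applyMetadataPatch mechanics; the payload shape is assumed.
private MetadataRest replaceTitle(MetadataRest metadataRest) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    // An RFC 6902 patch replacing the first dc.title value under the "metadata" wrapper node.
    JsonNode patch = mapper.readTree(
        "[{\"op\":\"replace\",\"path\":\"/metadata/dc.title/0/value\",\"value\":\"New title\"}]");
    ObjectNode target = mapper.createObjectNode();
    target.replace("metadata", mapper.valueToTree(metadataRest));
    JsonPatch.applyInPlace(patch, target);
    return mapper.treeToValue(target.get("metadata"), MetadataRest.class);
}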

View File

@@ -20,7 +20,7 @@ import org.springframework.stereotype.Component;
* Provides patch operations for eperson updates.
*/
@Component
public class EPersonPatch extends AbstractResourcePatch<EPersonRest> {
public class EPersonPatch extends DSpaceObjectPatch<EPersonRest> {
@Autowired
EPersonOperationFactory patchFactory;

View File

@@ -20,7 +20,7 @@ import org.springframework.stereotype.Component;
* Provides PATCH operations for item updates.
*/
@Component
public class ItemPatch extends AbstractResourcePatch<ItemRest> {
public class ItemPatch extends DSpaceObjectPatch<ItemRest> {
@Autowired
ItemOperationFactory patchFactory;

View File

@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.utils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.CollectionRest;
import org.springframework.stereotype.Component;
/**
* This class contains methods that define in what way CollectionRest objects are considered equal
*/
@Component
public class CollectionRestEqualityUtils extends DSpaceObjectRestEqualityUtils {
/**
* Returns a boolean indicating whether the given CollectionRest objects are equal,
* determined by comparing their attributes
* @param original The original CollectionRest object
* @param updated The CollectionRest object to compare against the original
* @return A boolean indicating whether they're equal
*/
public boolean isCollectionRestEqualWithoutMetadata(CollectionRest original, CollectionRest updated) {
return super.isDSpaceObjectEqualsWithoutMetadata(original, updated) &&
StringUtils.equals(original.getCategory(), updated.getCategory()) &&
StringUtils.equals(original.getType(), updated.getType());
}
}

View File

@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.utils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.CommunityRest;
import org.springframework.stereotype.Component;
/**
* This class contains methods that define in what way CommunityRest objects are considered equal
*/
@Component
public class CommunityRestEqualityUtils extends DSpaceObjectRestEqualityUtils {
/**
* Returns a boolean indicating whether the given CommunityRest objects are equal,
* determined by comparing their attributes
* @param original The original CommunityRest object
* @param updated The CommunityRest object to compare against the original
* @return A boolean indicating whether they're equal
*/
public boolean isCommunityRestEqualWithoutMetadata(CommunityRest original, CommunityRest updated) {
return super.isDSpaceObjectEqualsWithoutMetadata(original, updated) &&
StringUtils.equals(original.getCategory(), updated.getCategory()) &&
StringUtils.equals(original.getType(), updated.getType());
}
}

View File

@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.utils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.app.rest.model.DSpaceObjectRest;
import org.springframework.stereotype.Component;
/**
* This class contains methods that define in what way DSpaceObjectRest objects are considered equal
*/
@Component
public class DSpaceObjectRestEqualityUtils {
/**
* Returns a boolean indicating whether the given DSpaceObjectRest objects are equal,
* determined by comparing their attributes
* @param original The original DSpaceObjectRest object
* @param updated The DSpaceObjectRest object to compare against the original
* @return A boolean indicating whether they're equal
*/
public boolean isDSpaceObjectEqualsWithoutMetadata(DSpaceObjectRest original, DSpaceObjectRest updated) {
return StringUtils.equals(original.getId(), updated.getId()) &&
StringUtils.equals(original.getCategory(), updated.getCategory()) &&
StringUtils.equals(original.getHandle(), updated.getHandle());
}
}
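A hedged sketch of how these helpers slot into a PUT flow, where only metadata is allowed to change (the surrounding repository wiring, requestBody, and bean names here are assumptions for illustration):
// Illustrative use of the equality helpers in a PUT handler; wiring is assumed, not from this commit.
CollectionRest original = collectionConverter.fromModel(collection);
CollectionRest updated = new ObjectMapper().readValue(requestBody, CollectionRest.class);
if (!collectionRestEqualityUtils.isCollectionRestEqualWithoutMetadata(original, updated)) {
    // Any difference outside metadata (id, handle, category, type) is rejected.
    throw new UnprocessableEntityException("only metadata may be modified via PUT");
}
// Otherwise, apply updated.getMetadata() to the collection and persist it.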

View File

@@ -27,11 +27,13 @@ import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.matcher.BitstreamFormatMatcher;
import org.dspace.app.rest.matcher.BitstreamMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.content.service.BitstreamService;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers;
import org.junit.Ignore;
import org.junit.Test;
@@ -587,4 +589,25 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
getClient(token).perform(delete("/api/core/bitstreams/" + col.getLogo().getID()))
.andExpect(status().is(422));
}
@Test
public void patchBitstreamMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchBitstreamMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community")
.build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/bitstreams/"
+ parentCommunity.getLogo().getID(), expectedStatus);
}
}
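MetadataPatchSuite itself is not part of this excerpt; at the HTTP level, each request it issues should look roughly like the sketch below (the patch body is an assumed example, and expectedStatus is the value passed into runPatchMetadataTests):
// Rough shape of one metadata PATCH request issued by the suite; the body is an assumption.
String patchBody = "[{\"op\":\"add\",\"path\":\"/metadata/dc.title\","
        + "\"value\":[{\"value\":\"Patched title\"}]}]";
getClient(token).perform(patch("/api/core/bitstreams/" + parentCommunity.getLogo().getID())
        .content(patchBody)
        .contentType(MediaType.APPLICATION_JSON))
        .andExpect(status().is(expectedStatus));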

View File

@@ -7,10 +7,10 @@
*/
package org.dspace.app.rest;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
@@ -31,6 +31,7 @@ import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.eperson.Group;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.test.web.servlet.result.MockMvcResultHandlers;
@@ -380,14 +381,10 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe
"An embargoed publication",
"2017-08-10"))))
//The private item must not be present
.andExpect(jsonPath("$._embedded.items[*].metadata[?(@.key=='dc.title')].value",
not(hasItem("This is a private item"))))
//The internal item must not be present
.andExpect(jsonPath("$._embedded.items[*].metadata[?(@.key=='dc.title')].value",
not(hasItem("Internal publication"))))
;
//The private and internal items must not be present
.andExpect(jsonPath("$._embedded.items[*].metadata", Matchers.allOf(
not(matchMetadata("dc.title", "This is a private item")),
not(matchMetadata("dc.title", "Internal publication")))));
}
@Test

View File

@@ -7,25 +7,48 @@
*/
package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.UUID;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.converter.CollectionConverter;
import org.dspace.app.rest.matcher.CollectionMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.Constants;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTest {
@Autowired
CollectionConverter collectionConverter;
@Autowired
AuthorizeService authorizeService;
@Test
public void findAllTest() throws Exception {
@@ -276,4 +299,351 @@ public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTes
.andExpect(status().isNotFound());
}
@Test
public void findCollectionWithParentCommunity() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community Two")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child2).withName("Collection 2").build();
getClient().perform(get("/api/core/collections/" + col1.getID()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$", Matchers.not(
is(
CollectionMatcher.matchCollectionEntry(col2.getName(), col2.getID(), col2.getHandle())
))));
}
@Test
public void updateTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
getClient().perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/collections")))
;
String token = getAuthToken(admin.getEmail(), password);
ObjectMapper mapper = new ObjectMapper();
CollectionRest collectionRest = collectionConverter.fromModel(col1);
collectionRest.setMetadata(new MetadataRest()
.put("dc.title", new MetadataValueRest("Electronic theses and dissertations")));
getClient(token).perform(put("/api/core/collections/" + col1.getID().toString())
.contentType(MediaType.APPLICATION_JSON)
.content(mapper.writeValueAsBytes(collectionRest)))
.andExpect(status().isOk())
;
getClient().perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry("Electronic theses and dissertations",
col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections")))
;
}
@Test
public void deleteTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
Community parentCommunity2 = CommunityBuilder.createCommunity(context)
.withName("Parent Community 2")
.withLogo("SomeTest")
.build();
Community parentCommunityChild1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunityChild1)
.withName("Collection 1")
.build();
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections"))) ;
getClient(token).perform(delete("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isNoContent())
;
getClient(token).perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isNotFound())
;
}
@Test
public void deleteTestUnAuthorized() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
Community parentCommunity2 = CommunityBuilder.createCommunity(context)
.withName("Parent Community 2")
.withLogo("SomeTest")
.build();
Community parentCommunityChild1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunityChild1)
.withName("Collection 1")
.build();
getClient().perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections"))) ;
getClient().perform(delete("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isUnauthorized())
;
}
@Test
public void createTest() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
ObjectMapper mapper = new ObjectMapper();
CollectionRest collectionRest = new CollectionRest();
// We send a name but the created collection should set this to the title
collectionRest.setName("Collection");
collectionRest.setMetadata(new MetadataRest()
.put("dc.description",
new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract",
new MetadataValueRest("Sample top-level community created via the REST API"))
.put("dc.description.tableofcontents",
new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights",
new MetadataValueRest("Custom Copyright Text"))
.put("dc.title",
new MetadataValueRest("Title Text")));
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken).perform(post("/api/core/collections")
.content(mapper.writeValueAsBytes(collectionRest))
.param("parent", parentCommunity.getID().toString())
.contentType(contentType))
.andExpect(status().isCreated())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", not(empty())),
hasJsonPath("$.uuid", not(empty())),
hasJsonPath("$.name", is("Title Text")),
hasJsonPath("$.handle", not(empty())),
hasJsonPath("$.type", is("collection")),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample top-level community created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
MetadataMatcher.matchMetadata("dc.rights",
"Custom Copyright Text"),
MetadataMatcher.matchMetadata("dc.title",
"Title Text")
)))));
}
@Test
public void deleteCollectionEpersonWithDeleteRightsTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
Community parentCommunity2 = CommunityBuilder.createCommunity(context)
.withName("Parent Community 2")
.withLogo("SomeTest")
.build();
Community parentCommunityChild1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunityChild1)
.withName("Collection 1")
.build();
context.setCurrentUser(eperson);
authorizeService.addPolicy(context, col1, Constants.DELETE, eperson);
authorizeService.addPolicy(context, col1, Constants.WRITE, eperson);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections"))) ;
getClient(token).perform(delete("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isNoContent())
;
getClient(token).perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isNotFound())
;
authorizeService.removePoliciesActionFilter(context, eperson, Constants.DELETE);
authorizeService.removePoliciesActionFilter(context, eperson, Constants.WRITE);
}
@Test
public void updateCollectionEpersonWithWriteRightsTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
getClient().perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/collections")))
;
context.setCurrentUser(eperson);
authorizeService.addPolicy(context, col1, Constants.WRITE, eperson);
String token = getAuthToken(eperson.getEmail(), password);
ObjectMapper mapper = new ObjectMapper();
CollectionRest collectionRest = collectionConverter.fromModel(col1);
collectionRest.setMetadata(new MetadataRest()
.put("dc.title", new MetadataValueRest("Electronic theses and dissertations")));
getClient(token).perform(put("/api/core/collections/" + col1.getID().toString())
.contentType(MediaType.APPLICATION_JSON)
.content(mapper.writeValueAsBytes(collectionRest)))
.andExpect(status().isOk())
;
getClient().perform(get("/api/core/collections/" + col1.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CollectionMatcher.matchCollectionEntry("Electronic theses and dissertations",
col1.getID(), col1.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/collections")))
;
authorizeService.removePoliciesActionFilter(context, eperson, Constants.WRITE);
}
@Test
public void patchCollectionMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchCollectionMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build();
Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/collections/" + col.getID(), expectedStatus);
}
}

View File

@@ -8,34 +8,50 @@
package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.UUID;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.builder.CollectionBuilder;
import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.converter.CommunityConverter;
import org.dspace.app.rest.matcher.CommunityMatcher;
import org.dspace.app.rest.matcher.CommunityMetadataMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.core.Constants;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest {
@Autowired
CommunityConverter communityConverter;
@Autowired
AuthorizeService authorizeService;
@Test
public void createTest() throws Exception {
context.turnOffAuthorisationSystem();
@@ -45,31 +61,29 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
// We send a name but the created community should set this to the title
comm.setName("Test Top-Level Community");
MetadataEntryRest description = new MetadataEntryRest();
description.setKey("dc.description");
MetadataRest metadataRest = new MetadataRest();
MetadataValueRest description = new MetadataValueRest();
description.setValue("<p>Some cool HTML code here</p>");
metadataRest.put("dc.description", description);
MetadataEntryRest abs = new MetadataEntryRest();
abs.setKey("dc.description.abstract");
MetadataValueRest abs = new MetadataValueRest();
abs.setValue("Sample top-level community created via the REST API");
metadataRest.put("dc.description.abstract", abs);
MetadataEntryRest contents = new MetadataEntryRest();
contents.setKey("dc.description.tableofcontents");
MetadataValueRest contents = new MetadataValueRest();
contents.setValue("<p>HTML News</p>");
metadataRest.put("dc.description.tableofcontents", contents);
MetadataEntryRest copyright = new MetadataEntryRest();
copyright.setKey("dc.rights");
MetadataValueRest copyright = new MetadataValueRest();
copyright.setValue("Custom Copyright Text");
metadataRest.put("dc.rights", copyright);
MetadataEntryRest title = new MetadataEntryRest();
title.setKey("dc.title");
MetadataValueRest title = new MetadataValueRest();
title.setValue("Title Text");
metadataRest.put("dc.title", title);
comm.setMetadata(Arrays.asList(description,
abs,
contents,
copyright,
title));
comm.setMetadata(metadataRest);
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken).perform(post("/api/core/communities")
@@ -87,21 +101,79 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
hasJsonPath("$._links.logo.href", not(empty())),
hasJsonPath("$._links.subcommunities.href", not(empty())),
hasJsonPath("$._links.self.href", not(empty())),
hasJsonPath("$.metadata", Matchers.containsInAnyOrder(
CommunityMetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
CommunityMetadataMatcher.matchMetadata("dc.description.abstract",
hasJsonPath("$.metadata", Matchers.allOf(
matchMetadata("dc.description", "<p>Some cool HTML code here</p>"),
matchMetadata("dc.description.abstract",
"Sample top-level community created via the REST API"),
CommunityMetadataMatcher.matchMetadata("dc.description.tableofcontents",
matchMetadata("dc.description.tableofcontents", "<p>HTML News</p>"),
matchMetadata("dc.rights", "Custom Copyright Text"),
matchMetadata("dc.title", "Title Text")
)))));
}
@Test
public void createWithParentTest() throws Exception {
context.turnOffAuthorisationSystem();
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
ObjectMapper mapper = new ObjectMapper();
CommunityRest comm = new CommunityRest();
// We send a name but the created community should set this to the title
comm.setName("Test Sub-Level Community");
comm.setMetadata(new MetadataRest()
.put("dc.description",
new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract",
new MetadataValueRest("Sample top-level community created via the REST API"))
.put("dc.description.tableofcontents",
new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights",
new MetadataValueRest("Custom Copyright Text"))
.put("dc.title",
new MetadataValueRest("Title Text")));
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken).perform(post("/api/core/communities")
.content(mapper.writeValueAsBytes(comm))
.param("parent", parentCommunity.getID().toString())
.contentType(contentType))
.andExpect(status().isCreated())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", not(empty())),
hasJsonPath("$.uuid", not(empty())),
hasJsonPath("$.name", is("Title Text")),
hasJsonPath("$.handle", not(empty())),
hasJsonPath("$.type", is("community")),
hasJsonPath("$._links.collections.href", not(empty())),
hasJsonPath("$._links.logo.href", not(empty())),
hasJsonPath("$._links.subcommunities.href", not(empty())),
hasJsonPath("$._links.self.href", not(empty())),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample top-level community created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
CommunityMetadataMatcher.matchMetadata("dc.rights",
MetadataMatcher.matchMetadata("dc.rights",
"Custom Copyright Text"),
CommunityMetadataMatcher.matchMetadata("dc.title",
MetadataMatcher.matchMetadata("dc.title",
"Title Text")
)))));
}
@Test
public void createUnauthorizedTest() throws Exception {
context.turnOffAuthorisationSystem();
@@ -110,11 +182,13 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
CommunityRest comm = new CommunityRest();
comm.setName("Test Top-Level Community");
MetadataEntryRest title = new MetadataEntryRest();
title.setKey("dc.title");
title.setValue("Title Text");
MetadataRest metadataRest = new MetadataRest();
comm.setMetadata(Arrays.asList(title));
MetadataValueRest title = new MetadataValueRest();
title.setValue("Title Text");
metadataRest.put("dc.title", title);
comm.setMetadata(metadataRest);
// Anonymous user tries to create a community.
// Should fail because user is not authenticated. Error 401.
@@ -511,4 +585,291 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
getClient().perform(get("/api/core/communities/" + UUID.randomUUID())).andExpect(status().isNotFound());
}
@Test
public void updateTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry(parentCommunity.getName(), parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$", Matchers.not(
Matchers.is(
CommunityMatcher.matchCommunityEntry(child1.getName(), child1.getID(), child1.getHandle())
)
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
;
String token = getAuthToken(admin.getEmail(), password);
ObjectMapper mapper = new ObjectMapper();
CommunityRest communityRest = communityConverter.fromModel(parentCommunity);
communityRest.setMetadata(new MetadataRest()
.put("dc.title", new MetadataValueRest("Electronic theses and dissertations")));
getClient(token).perform(put("/api/core/communities/" + parentCommunity.getID().toString())
.contentType(MediaType.APPLICATION_JSON)
.content(mapper.writeValueAsBytes(communityRest)))
.andExpect(status().isOk())
;
getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry("Electronic theses and dissertations",
parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
;
}
@Test
public void deleteTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
Community parentCommunity2 = CommunityBuilder.createCommunity(context)
.withName("Parent Community 2")
.withLogo("SomeTest")
.build();
Community parentCommunityChild1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Community parentCommunityChild2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community2")
.build();
Community parentCommunityChild2Child1 = CommunityBuilder.createSubCommunity(context, parentCommunityChild2)
.withName("Sub Sub Community")
.build();
Community parentCommunity2Child1 = CommunityBuilder.createSubCommunity(context, parentCommunity2)
.withName("Sub2 Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunityChild1)
.withName("Collection 1")
.build();
String token = getAuthToken(admin.getEmail(), password);
getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry(parentCommunity.getName(), parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/communities"))) ;
getClient(token).perform(delete("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isNoContent())
;
getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isNotFound())
;
getClient(token).perform(get("/api/core/communities/" + parentCommunityChild1.getID().toString()))
.andExpect(status().isNotFound())
;
}
@Test
public void deleteTestUnAuthorized() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.withLogo("ThisIsSomeDummyText")
.build();
Community parentCommunity2 = CommunityBuilder.createCommunity(context)
.withName("Parent Community 2")
.withLogo("SomeTest")
.build();
Community parentCommunityChild1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Community parentCommunityChild2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community2")
.build();
Community parentCommunityChild2Child1 = CommunityBuilder.createSubCommunity(context, parentCommunityChild2)
.withName("Sub Sub Community")
.build();
Community parentCommunity2Child1 = CommunityBuilder.createSubCommunity(context, parentCommunity2)
.withName("Sub2 Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, parentCommunityChild1)
.withName("Collection 1")
.build();
getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry(parentCommunity.getName(), parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/communities"))) ;
getClient().perform(delete("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isUnauthorized())
;
}
@Test
public void deleteCommunityEpersonWithDeleteRightsTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
context.setCurrentUser(eperson);
authorizeService.addPolicy(context, parentCommunity, Constants.DELETE, eperson);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry(parentCommunity.getName(), parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$._links.self.href",
Matchers.containsString("/api/core/communities"))) ;
getClient(token).perform(delete("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isNoContent())
;
getClient(token).perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isNotFound())
;
authorizeService.removePoliciesActionFilter(context, eperson, Constants.DELETE);
}
@Test
public void updateCommunityEpersonWithWriteRightsTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry(parentCommunity.getName(), parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$", Matchers.not(
Matchers.is(
CommunityMatcher.matchCommunityEntry(child1.getName(), child1.getID(), child1.getHandle())
)
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
;
ObjectMapper mapper = new ObjectMapper();
CommunityRest communityRest = communityConverter.fromModel(parentCommunity);
communityRest.setMetadata(new MetadataRest()
.put("dc.title", new MetadataValueRest("Electronic theses and dissertations")));
context.setCurrentUser(eperson);
authorizeService.addPolicy(context, parentCommunity, Constants.WRITE, eperson);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token).perform(put("/api/core/communities/" + parentCommunity.getID().toString())
.contentType(MediaType.APPLICATION_JSON)
.content(mapper.writeValueAsBytes(communityRest)))
.andExpect(status().isOk())
;
getClient().perform(get("/api/core/communities/" + parentCommunity.getID().toString()))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$", Matchers.is(
CommunityMatcher.matchCommunityEntry("Electronic theses and dissertations",
parentCommunity.getID(),
parentCommunity.getHandle())
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/communities")))
;
authorizeService.removePoliciesActionFilter(context, eperson, Constants.WRITE);
}
@Test
public void patchCommunityMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchCommunityMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/communities/"
+ parentCommunity.getID(), expectedStatus);
}
}

View File

@@ -8,6 +8,7 @@
package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
@@ -21,7 +22,6 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import javax.ws.rs.core.MediaType;
@@ -32,12 +32,13 @@ import org.dspace.app.rest.builder.CommunityBuilder;
import org.dspace.app.rest.builder.EPersonBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.matcher.EPersonMatcher;
import org.dspace.app.rest.matcher.EPersonMetadataMatcher;
import org.dspace.app.rest.model.EPersonRest;
import org.dspace.app.rest.model.MetadataEntryRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.ReplaceOperation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.eperson.EPerson;
@@ -54,15 +55,16 @@ public class EPersonRestRepositoryIT extends AbstractControllerIntegrationTest {
// we should check how to get it from Spring
ObjectMapper mapper = new ObjectMapper();
EPersonRest data = new EPersonRest();
MetadataRest metadataRest = new MetadataRest();
data.setEmail("createtest@fake-email.com");
data.setCanLogIn(true);
MetadataEntryRest surname = new MetadataEntryRest();
surname.setKey("eperson.lastname");
MetadataValueRest surname = new MetadataValueRest();
surname.setValue("Doe");
MetadataEntryRest firstname = new MetadataEntryRest();
firstname.setKey("eperson.firstname");
metadataRest.put("eperson.lastname", surname);
MetadataValueRest firstname = new MetadataValueRest();
firstname.setValue("John");
data.setMetadata(Arrays.asList(surname, firstname));
metadataRest.put("eperson.firstname", firstname);
data.setMetadata(metadataRest);
String authToken = getAuthToken(admin.getEmail(), password);
getClient(authToken).perform(post("/api/eperson/epersons")
@@ -78,9 +80,9 @@ public class EPersonRestRepositoryIT extends AbstractControllerIntegrationTest {
hasJsonPath("$.canLogIn", is(true)),
hasJsonPath("$.requireCertificate", is(false)),
hasJsonPath("$._links.self.href", not(empty())),
hasJsonPath("$.metadata", Matchers.containsInAnyOrder(
EPersonMetadataMatcher.matchFirstName("John"),
EPersonMetadataMatcher.matchLastName("Doe")
hasJsonPath("$.metadata", Matchers.allOf(
matchMetadata("eperson.firstname", "John"),
matchMetadata("eperson.lastname", "Doe")
)))));
// TODO cleanup the context!!!
}
@@ -1030,4 +1032,23 @@ public class EPersonRestRepositoryIT extends AbstractControllerIntegrationTest {
}
@Test
public void patchEPersonMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchEPersonMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
EPerson ePerson = EPersonBuilder.createEPerson(context).withEmail("user@test.com").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/eperson/epersons/" + ePerson.getID(), expectedStatus);
}
}

View File

@@ -22,6 +22,8 @@ import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.matcher.GroupMatcher;
import org.dspace.app.rest.model.GroupRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.hamcrest.Matchers;
import org.junit.Test;
@@ -234,4 +236,22 @@ public class GroupRestRepositoryIT extends AbstractControllerIntegrationTest {
;
}
@Test
public void patchGroupMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchGroupMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
Group group = GroupBuilder.createGroup(context).withName("Group").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/eperson/groups/" + group.getID(), expectedStatus);
}
}

View File

@@ -7,10 +7,13 @@
*/
package org.dspace.app.rest;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@@ -18,9 +21,11 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.ws.rs.core.MediaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.CharEncoding;
import org.dspace.app.rest.builder.BitstreamBuilder;
@@ -31,9 +36,14 @@ import org.dspace.app.rest.builder.GroupBuilder;
import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.builder.WorkspaceItemBuilder;
import org.dspace.app.rest.matcher.ItemMatcher;
import org.dspace.app.rest.matcher.MetadataMatcher;
import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.ReplaceOperation;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -43,6 +53,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.test.web.servlet.MvcResult;
public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
@@ -909,7 +920,6 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
}
@Test
public void useStringForBooleanTest() throws Exception {
context.turnOffAuthorisationSystem();
@@ -1414,4 +1424,348 @@ public class ItemRestRepositoryIT extends AbstractControllerIntegrationTest {
Matchers.containsString("/api/core/items")));
}
@Test
public void testCreateItem() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community with sub-community and two collections.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest = new ItemRest();
itemRest.setName("Practices of research data curation in institutional repositories:" +
" A qualitative view from repository staff");
itemRest.setInArchive(true);
itemRest.setDiscoverable(true);
itemRest.setWithdrawn(false);
itemRest.setMetadata(new MetadataRest()
.put("dc.description", new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract", new MetadataValueRest("Sample item created via the REST API"))
.put("dc.description.tableofcontents", new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights", new MetadataValueRest("Custom Copyright Text"))
.put("dc.title", new MetadataValueRest("Title Text")));
String token = getAuthToken(admin.getEmail(), password);
MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" +
col1.getID().toString())
.content(mapper.writeValueAsBytes(itemRest)).contentType(contentType))
.andExpect(status().isCreated())
.andReturn();
String content = mvcResult.getResponse().getContentAsString();
Map<String,Object> map = mapper.readValue(content, Map.class);
String itemUuidString = String.valueOf(map.get("uuid"));
String itemHandleString = String.valueOf(map.get("handle"));
//TODO Refactor this to use the converter to Item instead of checking every property separately
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", is(itemUuidString)),
hasJsonPath("$.uuid", is(itemUuidString)),
hasJsonPath("$.name", is("Title Text")),
hasJsonPath("$.handle", is(itemHandleString)),
hasJsonPath("$.type", is("item")),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample item created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
MetadataMatcher.matchMetadata("dc.rights",
"Custom Copyright Text"),
MetadataMatcher.matchMetadata("dc.title",
"Title Text")
)))));
}
@Test
public void updateTest() throws Exception {
//We turn off the authorization system in order to create the structure as defined below
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community, one sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest = new ItemRest();
itemRest.setName("Practices of research data curation in institutional repositories:" +
" A qualitative view from repository staff");
itemRest.setInArchive(true);
itemRest.setDiscoverable(true);
itemRest.setWithdrawn(false);
String token = getAuthToken(admin.getEmail(), password);
MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" +
col1.getID().toString())
.content(mapper.writeValueAsBytes(itemRest))
.contentType(contentType))
.andExpect(status().isCreated())
.andReturn();
String content = mvcResult.getResponse().getContentAsString();
Map<String,Object> map = mapper.readValue(content, Map.class);
String itemUuidString = String.valueOf(map.get("uuid"));
String itemHandleString = String.valueOf(map.get("handle"));
itemRest.setMetadata(new MetadataRest()
.put("dc.description", new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract", new MetadataValueRest("Sample item created via the REST API"))
.put("dc.description.tableofcontents", new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights", new MetadataValueRest("New Custom Copyright Text"))
.put("dc.title", new MetadataValueRest("New title")));
itemRest.setUuid(itemUuidString);
itemRest.setHandle(itemHandleString);
mvcResult = getClient(token).perform(put("/api/core/items/" + itemUuidString)
.content(mapper.writeValueAsBytes(itemRest))
.contentType(contentType))
.andExpect(status().isOk())
.andReturn();
content = mvcResult.getResponse().getContentAsString();
map = mapper.readValue(content, Map.class);
itemUuidString = String.valueOf(map.get("uuid"));
itemHandleString = String.valueOf(map.get("handle"));
//TODO Refactor this to use the converter to Item instead of checking every property separately
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", is(itemUuidString)),
hasJsonPath("$.uuid", is(itemUuidString)),
hasJsonPath("$.name", is("New title")),
hasJsonPath("$.handle", is(itemHandleString)),
hasJsonPath("$.type", is("item")),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample item created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
MetadataMatcher.matchMetadata("dc.rights",
"New Custom Copyright Text"),
MetadataMatcher.matchMetadata("dc.title",
"New title")
)))));
}
@Test
public void testDeleteItem() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community, one sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest = new ItemRest();
itemRest.setName("Practices of research data curation in institutional repositories:" +
" A qualitative view from repository staff");
itemRest.setInArchive(true);
itemRest.setDiscoverable(true);
itemRest.setWithdrawn(false);
itemRest.setMetadata(new MetadataRest()
.put("dc.description", new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract", new MetadataValueRest("Sample item created via the REST API"))
.put("dc.description.tableofcontents", new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights", new MetadataValueRest("Custom Copyright Text"))
.put("dc.title", new MetadataValueRest("Title Text")));
String token = getAuthToken(admin.getEmail(), password);
MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" +
col1.getID().toString())
.content(mapper.writeValueAsBytes(itemRest))
.contentType(contentType))
.andExpect(status().isCreated())
.andReturn();
String content = mvcResult.getResponse().getContentAsString();
Map<String,Object> map = mapper.readValue(content, Map.class);
String itemUuidString = String.valueOf(map.get("uuid"));
String itemHandleString = String.valueOf(map.get("handle"));
//TODO Refactor this to use the converter to Item instead of checking every property separately
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", is(itemUuidString)),
hasJsonPath("$.uuid", is(itemUuidString)),
hasJsonPath("$.name", is("Title Text")),
hasJsonPath("$.handle", is(itemHandleString)),
hasJsonPath("$.type", is("item")),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample item created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
MetadataMatcher.matchMetadata("dc.rights",
"Custom Copyright Text"),
MetadataMatcher.matchMetadata("dc.title",
"Title Text")
)))));
getClient(token).perform(delete("/api/core/items/" + itemUuidString))
.andExpect(status().isNoContent());
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isNotFound());
}
@Test
public void testDeleteItemUnauthorized() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community-collection structure with one parent community, one sub-community and one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
ObjectMapper mapper = new ObjectMapper();
ItemRest itemRest = new ItemRest();
itemRest.setName("Practices of research data curation in institutional repositories:" +
" A qualitative view from repository staff");
itemRest.setInArchive(true);
itemRest.setDiscoverable(true);
itemRest.setWithdrawn(false);
itemRest.setMetadata(new MetadataRest()
.put("dc.description", new MetadataValueRest("<p>Some cool HTML code here</p>"))
.put("dc.description.abstract", new MetadataValueRest("Sample item created via the REST API"))
.put("dc.description.tableofcontents", new MetadataValueRest("<p>HTML News</p>"))
.put("dc.rights", new MetadataValueRest("Custom Copyright Text"))
.put("dc.title", new MetadataValueRest("Title Text")));
String token = getAuthToken(admin.getEmail(), password);
MvcResult mvcResult = getClient(token).perform(post("/api/core/items?owningCollection=" +
col1.getID().toString())
.content(mapper.writeValueAsBytes(itemRest))
.contentType(contentType))
.andExpect(status().isCreated())
.andReturn();
String content = mvcResult.getResponse().getContentAsString();
Map<String,Object> map = mapper.readValue(content, Map.class);
String itemUuidString = String.valueOf(map.get("uuid"));
String itemHandleString = String.valueOf(map.get("handle"));
//TODO Refactor this to use the converter to Item instead of checking every property separately
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers.allOf(
hasJsonPath("$.id", is(itemUuidString)),
hasJsonPath("$.uuid", is(itemUuidString)),
hasJsonPath("$.name", is("Title Text")),
hasJsonPath("$.handle", is(itemHandleString)),
hasJsonPath("$.type", is("item")),
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.description",
"<p>Some cool HTML code here</p>"),
MetadataMatcher.matchMetadata("dc.description.abstract",
"Sample item created via the REST API"),
MetadataMatcher.matchMetadata("dc.description.tableofcontents",
"<p>HTML News</p>"),
MetadataMatcher.matchMetadata("dc.rights",
"Custom Copyright Text"),
MetadataMatcher.matchMetadata("dc.title",
"Title Text")
)))));
getClient().perform(delete("/api/core/items/" + itemUuidString))
.andExpect(status().isUnauthorized());
getClient(token).perform(get("/api/core/items/" + itemUuidString))
.andExpect(status().isOk());
}
@Test
public void deleteOneWrongUuidResourceNotFoundTest() throws Exception {
context.turnOffAuthorisationSystem();
//** GIVEN **
//1. A community with one collection.
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Collection col1 = CollectionBuilder
.createCollection(context, parentCommunity).withName("Collection 1").build();
//2. One public item.
Item publicItem = ItemBuilder.createItem(context, col1)
.withTitle("Public item 1")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald").withAuthor("Doe, John")
.withSubject("ExtraEntry")
.build();
String token = getAuthToken(admin.getEmail(), password);
//Delete public item
getClient(token).perform(delete("/api/core/items/" + parentCommunity.getID()))
.andExpect(status().is(404));
}
@Test
public void patchItemMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchItemMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Parent Community").build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community").build();
Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
Item item = ItemBuilder.createItem(context, col1).build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/items/" + item.getID(), expectedStatus);
}
}

View File

@@ -7,50 +7,258 @@
*/
package org.dspace.app.rest;
import static com.jayway.jsonpath.JsonPath.read;
import static org.dspace.app.rest.matcher.MetadataschemaMatcher.matchEntry;
import static org.hamcrest.Matchers.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.builder.MetadataSchemaBuilder;
import org.dspace.app.rest.converter.MetadataSchemaConverter;
import org.dspace.app.rest.matcher.MetadataschemaMatcher;
import org.dspace.app.rest.model.MetadataSchemaRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.content.MetadataSchema;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Integration tests for the {@link org.dspace.app.rest.repository.MetadataSchemaRestRepository}
* This class will include all the tests for the logic with regards to the
* {@link org.dspace.app.rest.repository.MetadataSchemaRestRepository}
*/
public class MetadataSchemaRestRepositoryIT extends AbstractControllerIntegrationTest {
private static final String TEST_NAME = "testSchemaName";
private static final String TEST_NAMESPACE = "testSchemaNameSpace";
private static final String TEST_NAME_UPDATED = "testSchemaNameUpdated";
private static final String TEST_NAMESPACE_UPDATED = "testSchemaNameSpaceUpdated";
@Autowired
MetadataSchemaConverter metadataSchemaConverter;
@Test
public void findAll() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadataschemas"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._embedded.metadataschemas", Matchers.hasItem(
MetadataschemaMatcher.matchEntry()
matchEntry()
)))
.andExpect(jsonPath("$._links.self.href", Matchers.containsString("/api/core/metadataschemas")))
.andExpect(jsonPath("$.page.size", is(20)));
}
@Test
public void findOne() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", is(
MetadataschemaMatcher.matchEntry(metadataSchema)
matchEntry(metadataSchema)
)));
}
@Test
public void createSuccess() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "ANamespace")
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = metadataSchemaConverter.fromModel(metadataSchema);
metadataSchemaRest.setPrefix(TEST_NAME);
metadataSchemaRest.setNamespace(TEST_NAMESPACE);
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
getClient(authToken)
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isCreated())
.andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.id")));
getClient().perform(get("/api/core/metadataschemas/" + idRef.get()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher.matchEntry(TEST_NAME, TEST_NAMESPACE)));
}
@Test
public void createUnauthorizedTest()
throws Exception {
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setPrefix(TEST_NAME);
metadataSchemaRest.setNamespace(TEST_NAMESPACE);
getClient()
.perform(post("/api/core/metadataschemas")
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnauthorized());
}
@Test
public void deleteSuccess() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "ATest", "A namespace")
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk());
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isNoContent());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isNotFound());
}
@Test
public void deleteUnauthorized() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())).andExpect(status().isOk());
getClient()
.perform(delete("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isUnauthorized());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID())).andExpect(status().isOk());
}
@Test
public void deleteNonExisting() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, "A name", "A namespace")
.build();
context.restoreAuthSystemState();
Integer id = metadataSchema.getID();
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadataschemas/" + id))
.andExpect(status().isNoContent());
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadataschemas/" + id))
.andExpect(status().isNotFound());
}
@Test
public void update() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME_UPDATED);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadataschemas/" + metadataSchema.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isOk());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME_UPDATED, TEST_NAMESPACE_UPDATED)));
}
@Test
public void updateUnauthorized() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME_UPDATED);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient()
.perform(put("/api/core/metadataschemas/" + metadataSchema.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isUnauthorized());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME, TEST_NAMESPACE)));
}
@Test
public void updateWrongRights() throws Exception {
context.turnOffAuthorisationSystem();
MetadataSchema metadataSchema = MetadataSchemaBuilder.createMetadataSchema(context, TEST_NAME, TEST_NAMESPACE)
.build();
context.restoreAuthSystemState();
MetadataSchemaRest metadataSchemaRest = new MetadataSchemaRest();
metadataSchemaRest.setId(metadataSchema.getID());
metadataSchemaRest.setPrefix(TEST_NAME_UPDATED);
metadataSchemaRest.setNamespace(TEST_NAMESPACE_UPDATED);
getClient(getAuthToken(eperson.getEmail(), password))
.perform(put("/api/core/metadataschemas/" + metadataSchema.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataSchemaRest))
.contentType(contentType))
.andExpect(status().isForbidden());
getClient().perform(get("/api/core/metadataschemas/" + metadataSchema.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataschemaMatcher
.matchEntry(TEST_NAME, TEST_NAMESPACE)));
}
}

View File

@@ -7,23 +7,63 @@
*/
package org.dspace.app.rest;
import static com.jayway.jsonpath.JsonPath.read;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.dspace.app.rest.builder.MetadataFieldBuilder;
import org.dspace.app.rest.builder.MetadataSchemaBuilder;
import org.dspace.app.rest.matcher.MetadataFieldMatcher;
import org.dspace.app.rest.model.MetadataFieldRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataFieldServiceImpl;
import org.dspace.content.MetadataSchema;
import org.dspace.content.service.MetadataSchemaService;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Integration tests for the {@link org.dspace.app.rest.repository.MetadataFieldRestRepository}
* This class will include all the tests for the logic with regard to the
* {@link org.dspace.app.rest.repository.MetadataFieldRestRepository}
*/
public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegrationTest {
private static final String ELEMENT = "test element";
private static final String QUALIFIER = "test qualifier";
private static final String SCOPE_NOTE = "test scope_note";
private static final String ELEMENT_UPDATED = "test element updated";
private static final String QUALIFIER_UPDATED = "test qualifier updated";
private static final String SCOPE_NOTE_UPDATED = "test scope_note updated";
private MetadataSchema metadataSchema;
@Autowired
private MetadataSchemaService metadataSchemaService;
@Autowired
private MetadataFieldServiceImpl metadataFieldService;
@Before
public void setup() throws Exception {
metadataSchema = metadataSchemaService.findAll(context).get(0);
}
@Test
public void findAll() throws Exception {
@@ -31,6 +71,7 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder
.createMetadataField(context, "AnElement", "AQualifier", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadatafields")
.param("size", String.valueOf(100)))
@@ -54,6 +95,7 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder
.createMetadataField(context, "AnElement", "AQualifier", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
@@ -65,13 +107,14 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
@Test
public void searchMethodsExist() throws Exception {
getClient().perform(get("/api/core/metadatafields"))
.andExpect(jsonPath("$._links.search.href", Matchers.notNullValue()));
.andExpect(jsonPath("$._links.search.href", notNullValue()));
getClient().perform(get("/api/core/metadatafields/search"))
.andExpect(status().isOk())
.andExpect(content().contentType(contentType))
.andExpect(jsonPath("$._links.bySchema", Matchers.notNullValue()));
.andExpect(jsonPath("$._links.bySchema", notNullValue()));
}
@Test
@@ -83,6 +126,7 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
MetadataField metadataField = MetadataFieldBuilder
.createMetadataField(context, schema, "AnElement", "AQualifier", "AScopeNote").build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadatafields/search/bySchema")
.param("schema", "dc")
@@ -124,4 +168,236 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration
.andExpect(status().isUnprocessableEntity());
}
@Test
public void createSuccess() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement("testElementForCreate");
metadataFieldRest.setQualifier("testQualifierForCreate");
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String authToken = getAuthToken(admin.getEmail(), password);
AtomicReference<Integer> idRef = new AtomicReference<>();
assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue());
getClient(authToken)
.perform(post("/api/core/metadatafields")
.param("schemaId", metadataSchema.getID() + "")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isCreated())
.andDo(result -> idRef.set(read(result.getResponse().getContentAsString(), "$.id")));
getClient(authToken).perform(get("/api/core/metadatafields/" + idRef.get()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), "testElementForCreate", "testQualifierForCreate")));
}
@Test
public void createUnauthorized() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE);
getClient()
.perform(post("/api/core/metadatafields")
.param("schemaId", metadataSchema.getID() + "")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnauthorized());
}
@Test
public void createUnauthorizedEPersonNoAdminRights() throws Exception {
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setElement(ELEMENT);
metadataFieldRest.setQualifier(QUALIFIER);
metadataFieldRest.setScopeNote(SCOPE_NOTE);
String token = getAuthToken(eperson.getEmail(), password);
getClient(token)
.perform(post("/api/core/metadatafields")
.param("schemaId", metadataSchema.getID() + "")
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isForbidden());
}
@Test
public void deleteSuccess() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk());
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isNoContent());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isNotFound());
}
@Test
public void deleteUnauthorized() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk());
getClient()
.perform(delete("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isUnauthorized());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk());
}
@Test
public void deleteUnauthorizedEPersonNoAdminRights() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
String token = getAuthToken(eperson.getEmail(), password);
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk());
getClient(token)
.perform(delete("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isForbidden());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk());
}
@Test
public void deleteNonExisting() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
Integer id = metadataField.getID();
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadatafields/" + id))
.andExpect(status().isNoContent());
assertThat(metadataFieldService.find(context, id), nullValue());
getClient(getAuthToken(admin.getEmail(), password))
.perform(delete("/api/core/metadatafields/" + id))
.andExpect(status().isNotFound());
}
@Test
public void update() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT_UPDATED);
metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(admin.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isOk());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT_UPDATED, QUALIFIER_UPDATED)
));
}
@Test
public void updateUnauthorized() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT_UPDATED);
metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient()
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isUnauthorized());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
@Test
public void updateWrongRights() throws Exception {
context.turnOffAuthorisationSystem();
MetadataField metadataField = MetadataFieldBuilder.createMetadataField(context, ELEMENT, QUALIFIER, SCOPE_NOTE)
.build();
context.restoreAuthSystemState();
MetadataFieldRest metadataFieldRest = new MetadataFieldRest();
metadataFieldRest.setId(metadataField.getID());
metadataFieldRest.setElement(ELEMENT_UPDATED);
metadataFieldRest.setQualifier(QUALIFIER_UPDATED);
metadataFieldRest.setScopeNote(SCOPE_NOTE_UPDATED);
getClient(getAuthToken(eperson.getEmail(), password))
.perform(put("/api/core/metadatafields/" + metadataField.getID())
.content(new ObjectMapper().writeValueAsBytes(metadataFieldRest))
.contentType(contentType))
.andExpect(status().isForbidden());
getClient().perform(get("/api/core/metadatafields/" + metadataField.getID()))
.andExpect(status().isOk())
.andExpect(jsonPath("$", MetadataFieldMatcher.matchMetadataFieldByKeys(
metadataSchema.getName(), ELEMENT, QUALIFIER)
));
}
}

View File

@@ -1,3 +1,4 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source

View File

@@ -17,7 +17,9 @@ import java.util.UUID;
import org.dspace.app.rest.builder.SiteBuilder;
import org.dspace.app.rest.matcher.SiteMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.content.Site;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers;
import org.junit.Test;
@@ -67,4 +69,23 @@ public class SiteRestRepositoryIT extends AbstractControllerIntegrationTest {
.andExpect(status().isNotFound());
}
@Test
public void patchSiteMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchSiteMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
Site site = SiteBuilder.createSite(context).build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/sites/" + site.getID(), expectedStatus);
}
}

View File

@@ -239,7 +239,8 @@ public class WorkspaceItemRestRepositoryIT extends AbstractControllerIntegration
getClient().perform(get("/api/submission/workspaceitems/" + witem.getID() + "/collection"))
.andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers
.is(CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle()))));
.is(CollectionMatcher.matchCollectionEntry(col1.getName(), col1.getID(), col1.getHandle()))
));
getClient().perform(get("/api/submission/workspaceitems/" + witem.getID() + "/item")).andExpect(status().isOk())
.andExpect(jsonPath("$", Matchers.is(ItemMatcher.matchItemWithTitleAndDateIssued(witem.getItem(),

View File

@@ -8,8 +8,8 @@
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
@@ -30,9 +30,9 @@ public class BitstreamMatcher {
hasJsonPath("$.uuid", is(bitstream.getID().toString())),
hasJsonPath("$.name", is(bitstream.getName())),
hasJsonPath("$.bundleName", is("ORIGINAL")),
hasJsonPath("$.metadata", containsInAnyOrder(
BitstreamMetadataMatcher.matchTitle(bitstream.getName()),
BitstreamMetadataMatcher.matchDescription(bitstream.getDescription())
hasJsonPath("$.metadata", allOf(
matchMetadata("dc.title", bitstream.getName()),
matchMetadata("dc.description", bitstream.getDescription())
)),
hasJsonPath("$.sizeBytes", is((int) bitstream.getSizeBytes())),
hasJsonPath("$.checkSum", matchChecksum()),

View File

@@ -1,33 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import org.hamcrest.Matcher;
public class BitstreamMetadataMatcher {
private BitstreamMetadataMatcher() { }
public static Matcher<? super Object> matchTitle(String title) {
return allOf(
hasJsonPath("$.key", is("dc.title")),
hasJsonPath("$.value", is(title))
);
}
public static Matcher<? super Object> matchDescription(String description) {
return allOf(
hasJsonPath("$.key", is("dc.description")),
hasJsonPath("$.value", is(description))
);
}
}

View File

@@ -32,8 +32,8 @@ public class CollectionMatcher {
hasJsonPath("$.name", is(name)),
hasJsonPath("$.handle", is(handle)),
hasJsonPath("$.type", is("collection")),
hasJsonPath("$.metadata", Matchers.contains(
CollectionMetadataMatcher.matchTitle(name)
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.title", name)
)),
matchLinks(uuid),
matchLogo(logo)

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.allOf;
import org.hamcrest.Matcher;
public class CollectionMetadataMatcher {
private CollectionMetadataMatcher() { }
public static Matcher<? super Object> matchTitle(String title) {
return allOf(
hasJsonPath("$.key", is("dc.title")),
hasJsonPath("$.value", is(title))
);
}
}

View File

@@ -36,8 +36,8 @@ public class CommunityMatcher {
hasJsonPath("$.name", is(name)),
hasJsonPath("$.handle", is(handle)),
hasJsonPath("$.type", is("community")),
hasJsonPath("$.metadata", Matchers.contains(
CommunityMetadataMatcher.matchMetadata("dc.title", name)
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("dc.title", name)
))
);
}

View File

@@ -1,26 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import org.hamcrest.Matcher;
public class CommunityMetadataMatcher {
private CommunityMetadataMatcher() { }
public static Matcher<? super Object> matchMetadata(String key, String value) {
return allOf(
hasJsonPath("$.key", is(key)),
hasJsonPath("$.value", is(value))
);
}
}

View File

@@ -29,9 +29,9 @@ public class EPersonMatcher {
hasJsonPath("$.type", is("eperson")),
hasJsonPath("$.canLogIn", not(empty())),
hasJsonPath("$._links.self.href", containsString("/api/eperson/epersons/" + ePerson.getID().toString())),
hasJsonPath("$.metadata", Matchers.hasItems(
EPersonMetadataMatcher.matchFirstName(ePerson.getFirstName()),
EPersonMetadataMatcher.matchLastName(ePerson.getLastName())
hasJsonPath("$.metadata", Matchers.allOf(
MetadataMatcher.matchMetadata("eperson.firstname", ePerson.getFirstName()),
MetadataMatcher.matchMetadata("eperson.lastname", ePerson.getLastName())
))
);
}

View File

@@ -1,40 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import org.hamcrest.Matcher;
public class EPersonMetadataMatcher {
private EPersonMetadataMatcher() { }
public static Matcher<? super Object> matchFirstName(String firstName) {
return allOf(
hasJsonPath("$.key", is("eperson.firstname")),
hasJsonPath("$.value", is(firstName))
);
}
public static Matcher<? super Object> matchLastName(String lastName) {
return allOf(
hasJsonPath("$.key", is("eperson.lastname")),
hasJsonPath("$.value", is(lastName))
);
}
public static Matcher<? super Object> matchLanguage(String language) {
return allOf(
hasJsonPath("$.key", is("eperson.language")),
hasJsonPath("$.value", is(language))
);
}
}

View File

@@ -8,9 +8,9 @@
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata;
import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
@@ -34,8 +34,9 @@ public class ItemMatcher {
matchItemProperties(item),
//Check core metadata (the JSON Path expression evaluates to a collection so we have to use contains)
hasJsonPath("$.metadata[?(@.key=='dc.title')].value", contains(title)),
hasJsonPath("$.metadata[?(@.key=='dc.date.issued')].value", contains(dateIssued)),
hasJsonPath("$.metadata", allOf(
matchMetadata("dc.title", title),
matchMetadata("dc.date.issued", dateIssued))),
//Check links
matchItemLinks(item)

View File

@@ -0,0 +1,45 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.matcher;
import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.is;
import org.hamcrest.Matcher;
/**
* Utility class to provide convenient matchers for metadata.
*/
public class MetadataMatcher {
private MetadataMatcher() { }
/**
* Gets a matcher to ensure a given value is present among all values for a given metadata key.
*
* @param key the metadata key.
* @param value the value that must be present.
* @return the matcher.
*/
public static Matcher<? super Object> matchMetadata(String key, String value) {
return hasJsonPath("$.['" + key + "'][*].value", contains(value));
}
/**
* Gets a matcher to ensure a given value is present at a specific position in the list of values for a given key.
*
* @param key the metadata key.
* @param value the value that must be present.
* @param position the position it must be present at.
* @return the matcher.
*/
public static Matcher<? super Object> matchMetadata(String key, String value, int position) {
return hasJsonPath("$.['" + key + "'][" + position + "].value", is(value));
}
}
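
A hedged usage sketch (not part of this changeset): inside an AbstractControllerIntegrationTest subclass, the two overloads above compose with jsonPath the same way the item tests earlier in this diff use them; the UUID value below is hypothetical.

@Test
public void metadataMatcherUsageSketch() throws Exception {
    // Hypothetical UUID; in the real tests it comes from an ItemBuilder or a POST response.
    String itemUuidString = "hypothetical-item-uuid";
    getClient().perform(get("/api/core/items/" + itemUuidString))
               .andExpect(jsonPath("$.metadata", Matchers.allOf(
                   // value present among all dc.title values
                   MetadataMatcher.matchMetadata("dc.title", "Title Text"),
                   // value present at position 0 specifically
                   MetadataMatcher.matchMetadata("dc.title", "Title Text", 0))));
}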

View File

@@ -29,9 +29,13 @@ public class MetadataschemaMatcher {
}
public static Matcher<? super Object> matchEntry(MetadataSchema metadataSchema) {
return matchEntry(metadataSchema.getName(), metadataSchema.getNamespace());
}
public static Matcher<? super Object> matchEntry(String name, String nameSpace) {
return allOf(
hasJsonPath("$.prefix", is(metadataSchema.getName())),
hasJsonPath("$.namespace", is(metadataSchema.getNamespace())),
hasJsonPath("$.prefix", is(name)),
hasJsonPath("$.namespace", is(nameSpace)),
hasJsonPath("$.type", is("metadataschema")),
hasJsonPath("$._links.self.href", Matchers.containsString("/api/core/metadataschemas"))
);

View File

@@ -0,0 +1,90 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.test;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import javax.ws.rs.core.MediaType;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
/**
* Utility class for performing metadata patch tests sourced from a common json file (see constructor).
*/
public class MetadataPatchSuite {
private final ObjectMapper objectMapper = new ObjectMapper();
private final JsonNode suite;
/**
* Initializes the suite by parsing the json file of tests.
*
* @throws Exception if there is an error reading the file.
*/
public MetadataPatchSuite() throws Exception {
suite = objectMapper.readTree(getClass().getResourceAsStream("metadata-patch-suite.json"));
}
/**
* Runs all tests in the file using the given client and url, expecting the given status.
*
* @param client the client to use.
* @param url the url to issue the patch against.
* @param expectedStatus the expected http status code. If this does not match the actual code, the test fails.
*/
public void runWith(MockMvc client, String url, int expectedStatus) {
for (JsonNode testNode: suite.get("tests")) {
String requestBody = testNode.get("patch").toString();
String expectedMetadata = testNode.get("expect").toString();
try {
System.out.println("Running patch test: " + testNode.get("name") + "\nRequest: " + requestBody);
checkResponse("PATCH", client, patch(url).content(requestBody), expectedMetadata, expectedStatus);
if (expectedStatus >= 200 && expectedStatus < 300) {
checkResponse("GET", client, get(url), expectedMetadata, expectedStatus);
}
} catch (Throwable t) {
Assert.fail("Metadata patch test '" + testNode.get("name") + "' failed.\n" + "Request body: "
+ requestBody + "\n" + "Error: " + (t instanceof AssertionError ? "" : t.getClass().getName())
+ t.getMessage());
}
}
}
/**
* Issues a PATCH or GET request and checks that the body and response code match what is expected.
*
* @param verb the http verb (PATCH or GET).
* @param client the client to use.
* @param requestBuilder the request builder that has been pre-seeded with the request url and request body.
* @param expectedMetadata the expected metadata as a minimal (no extra spaces) json string. Note: This will
* only be checked if the expectedStatus is in the 200 range.
* @param expectedStatus the expected http response status.
* @throws Exception if any checked error occurs, signifying test failure.
*/
private void checkResponse(String verb, MockMvc client, MockHttpServletRequestBuilder requestBuilder,
String expectedMetadata, int expectedStatus) throws Exception {
ResultActions resultActions = client.perform(requestBuilder
.contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
.andExpect(status().is(expectedStatus));
if (expectedStatus >= 200 && expectedStatus < 300) {
String responseBody = resultActions.andReturn().getResponse().getContentAsString();
JsonNode responseJson = objectMapper.readTree(responseBody);
String responseMetadata = responseJson.get("metadata").toString();
if (!responseMetadata.equals(expectedMetadata)) {
Assert.fail("Expected metadata in " + verb + " response: " + expectedMetadata
+ "\nGot metadata in " + verb + " response: " + responseMetadata);
}
}
}
}
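
For reference, a minimal sketch of wiring this suite into a repository IT, mirroring runPatchMetadataTests in the Item and Site tests earlier in this diff (the endpoint and expected status are taken from those tests):

// Inside an AbstractControllerIntegrationTest subclass, after building a Site via SiteBuilder.
String token = getAuthToken(admin.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/sites/" + site.getID(), 200);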

View File

@@ -0,0 +1,152 @@
{
"tests": [
{
"name": "clear metadata",
"patch": [
{ "op": "replace",
"path": "/metadata",
"value": {}
}
],
"expect": {}
},
{
"name": "add first title",
"patch": [
{
"op": "add",
"path": "/metadata/dc.title",
"value": [
{ "value": "title 1" }
]
}
],
"expect": {
"dc.title": [
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 }
]
}
},
{
"name": "add second title",
"patch": [
{
"op": "add",
"path": "/metadata/dc.title/-",
"value": { "value": "最後のタイトル", "language": "ja_JP" }
}
],
"expect": {
"dc.title": [
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 },
{ "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 }
]
}
},
{
"name": "insert zeroth title",
"patch": [
{
"op": "add",
"path": "/metadata/dc.title/0",
"value": {
"value": "title 0"
}
}
],
"expect": {
"dc.title": [
{ "value": "title 0", "language": null, "authority": null, "confidence": -1 },
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 },
{ "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 }
]
}
},
{
"name": "move last title up one",
"patch": [
{
"op": "move",
"from": "/metadata/dc.title/2",
"path": "/metadata/dc.title/1"
}
],
"expect": {
"dc.title": [
{ "value": "title 0", "language": null, "authority": null, "confidence": -1 },
{ "value": "最後のタイトル", "language": "ja_JP", "authority": null, "confidence": -1 },
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 }
]
}
},
{
"name": "replace title 2 value and language in two operations",
"patch": [
{
"op": "replace",
"path": "/metadata/dc.title/1/value",
"value": "title A"
},
{
"op": "replace",
"path": "/metadata/dc.title/1/language",
"value": "en_US"
}
],
"expect": {
"dc.title": [
{ "value": "title 0", "language": null, "authority": null, "confidence": -1 },
{ "value": "title A", "language": "en_US", "authority": null, "confidence": -1 },
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 }
]
}
},
{
"name": "copy title A to end of list",
"patch": [
{
"op": "copy",
"from": "/metadata/dc.title/1",
"path": "/metadata/dc.title/-"
}
],
"expect": {
"dc.title": [
{ "value": "title 0", "language": null, "authority": null, "confidence": -1 },
{ "value": "title A", "language": "en_US", "authority": null, "confidence": -1 },
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 },
{ "value": "title A", "language": "en_US", "authority": null, "confidence": -1 }
]
}
},
{
"name": "remove both title A copies",
"patch": [
{
"op": "remove",
"path": "/metadata/dc.title/1"
},
{
"op": "remove",
"path": "/metadata/dc.title/2"
}
],
"expect": {
"dc.title": [
{ "value": "title 0", "language": null, "authority": null, "confidence": -1 },
{ "value": "title 1", "language": null, "authority": null, "confidence": -1 }
]
}
},
{
"name": "remove all titles",
"patch": [
{
"op": "remove",
"path": "/metadata/dc.title"
}
],
"expect": {}
}
]
}
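
Each entry's "patch" array is sent verbatim as the body of an HTTP PATCH with the application/json-patch+json media type (see checkResponse in MetadataPatchSuite above). A hedged sketch of what the suite effectively performs for the "add first title" entry; the item URL and token are hypothetical, supplied by the calling test:

// Sketch only; the suite receives the URL and client from the calling test.
getClient(token).perform(patch("/api/core/items/" + itemUuidString)
        .content("[{\"op\": \"add\", \"path\": \"/metadata/dc.title\", \"value\": [{\"value\": \"title 1\"}]}]")
        .contentType(MediaType.APPLICATION_JSON_PATCH_JSON))
    .andExpect(status().is(200));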

View File

@@ -37,7 +37,7 @@ dspace.url = ${dspace.baseUrl}
# This is the URL that will be used for the REST endpoints to be served on.
# This will typically be followed by /api to determine the root endpoints.
dspace.restUrl = ${dspace.baseUrl}/rest
dspace.restUrl = ${dspace.baseUrl}/spring-rest
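# For example, assuming the default dspace.baseUrl of http://localhost:8080,
# the root endpoints would then be served from http://localhost:8080/spring-rest/api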
# Optional: DSpace URL for mobile access
# This

View File

@@ -31,6 +31,13 @@ oai.cache.enabled = true
# Base Cache Directory
oai.cache.dir = ${dspace.dir}/var/oai
#---------------------------------------------------------------#
#--------------OAI IMPORT CONFIGURATION ------------------------#
#---------------------------------------------------------------#
# Size of batches to commit to solr at a time
oai.import.batch.size = 1000
#---------------------------------------------------------------#
#--------------OAI HARVESTING CONFIGURATIONS--------------------#
#---------------------------------------------------------------#

View File

@@ -2,7 +2,15 @@
#------------SUBMISSION CURATION CONFIGURATIONS-----------------#
#---------------------------------------------------------------#
# This file contains configuration properties solely relating #
# to the scheduling of curation tasks during submission. #
# to the scheduling of curation tasks during submission -- that #
# is: when tasks are attached to a workflow. #
#---------------------------------------------------------------#
# Scan for viruses
submission-curation.virus-scan = false
# Report serializer plugin, to capture submission task reports.
# Uncomment exactly one, or configure your own.
# FileReporter writes reports to ${report.dir}/curation-yyyyMMddThhmmssSSS.report
plugin.single.org.dspace.curate.Reporter = org.dspace.curate.FileReporter
# LogReporter writes report lines to the DSpace log.
#plugin.single.org.dspace.curate.Reporter = org.dspace.curate.LogReporter

View File

@@ -1,33 +0,0 @@
# DSpace Database Now Upgrades Automatically
AS OF DSPACE 5, the DSpace database now upgrades itself AUTOMATICALLY.
Therefore, all `database_schema*.sql` files have been removed. Starting
with DSpace 4.x -> 5.0 upgrade, you will no longer need to manually run any
SQL scripts to upgrade your database.
Please see the [5.0 Upgrade Instructions](https://wiki.duraspace.org/display/DSDOC5x/Upgrading+DSpace)
for more information on upgrading to DSpace 5.
## More info on automatic database upgrades
As of DSpace 5.0, we now use [Flyway DB](http://flywaydb.org/) along with the
SQL scripts embedded in the `dspace-api.jar` to automatically keep your DSpace
database up-to-date. These scripts are now located in the source code at:
`[dspace-src]/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle`
As Flyway automates the upgrade process, you should NEVER run these SQL scripts
manually. For more information, please see the `README.md` in the scripts directory.
## Using the update-sequences.sql script
The `update-sequences.sql` script in this directory may still be used to update
your internal database counts if you feel they have gotten out of "sync". This
may sometimes occur after large restores of content (e.g. when using the DSpace
[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore)
feature).
This `update-sequences.sql` script can be run manually. It will not harm your
database (or its contents) in any way. It just ensures all database counts (i.e.
sequences) are properly set to the next available value.

View File

@@ -1,75 +0,0 @@
--
-- update-sequences.sql
--
-- Copyright (c) 2002-2016, The DSpace Foundation. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- - Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- - Neither the name of the DSpace Foundation nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-- DAMAGE.
-- SQL code to update the ID (primary key) generating sequences, if some
-- import operation has set explicit IDs.
--
-- Sequences are used to generate IDs for new rows in the database. If a
-- bulk import operation, such as an SQL dump, specifies primary keys for
-- imported data explicitly, the sequences are out of sync and need updating.
-- This SQL code does just that.
--
-- This should rarely be needed; any bulk import should be performed using the
-- org.dspace.content API which is safe to use concurrently and in multiple
-- JVMs. The SQL code below will typically only be required after a direct
-- SQL data dump from a backup or somesuch.
-- Depends on being run from sqlplus with incseq.sql in the current path
-- you can find incseq.sql at: http://www.akadia.com/services/scripts/incseq.sql
-- Here that script was renamed to updateseq.sql.
@updateseq.sql bitstreamformatregistry_seq bitstreamformatregistry bitstream_format_id ""
@updateseq.sql fileextension_seq fileextension file_extension_id ""
@updateseq.sql resourcepolicy_seq resourcepolicy policy_id ""
@updateseq.sql workspaceitem_seq workspaceitem workspace_item_id ""
@updateseq.sql workflowitem_seq workflowitem workflow_id ""
@updateseq.sql tasklistitem_seq tasklistitem tasklist_id ""
@updateseq.sql registrationdata_seq registrationdata registrationdata_id ""
@updateseq.sql subscription_seq subscription subscription_id ""
@updateseq.sql metadatafieldregistry_seq metadatafieldregistry metadata_field_id ""
@updateseq.sql metadatavalue_seq metadatavalue metadata_value_id ""
@updateseq.sql metadataschemaregistry_seq metadataschemaregistry metadata_schema_id ""
@updateseq.sql harvested_collection_seq harvested_collection id ""
@updateseq.sql harvested_item_seq harvested_item id ""
@updateseq.sql webapp_seq webapp webapp_id ""
@updateseq.sql requestitem_seq requestitem requestitem_id ""
@updateseq.sql handle_id_seq handle handle_id ""
-- Handle Sequence is a special case. Since Handles minted by DSpace use the 'handle_seq',
-- we need to ensure the next assigned handle will *always* be unique. So, 'handle_seq'
-- always needs to be set to the value of the *largest* handle suffix. That way when the
-- next handle is assigned, it will use the next largest number. This query does the following:
-- For all 'handle' values which have a number in their suffix (after '/'), find the maximum
-- suffix value, convert it to a number, and set the 'handle_seq' to start at the next value
-- (see updateseq.sql script for more)
@updateseq.sql handle_seq handle "to_number(regexp_replace(handle, '.*/', ''), '999999999999')" "WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')"
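-- Worked example (illustrative): if the largest numeric suffix among all
-- existing handles is 123456789/57, then handle_seq is recreated to start
-- at 58, so the next minted handle becomes 123456789/58.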

View File

@@ -1,30 +0,0 @@
-- #############################################################################################
--
-- %Purpose: Set a sequence to the max value of a given attribute
--
-- #############################################################################################
--
-- Parameters:
-- 1: sequence name
-- 2: table name
-- 3: attribute name
-- 4: optional WHERE clause (pass "" when no filter is needed)
--
-- Sample usage:
-- @updateseq.sql my_sequence my_table my_attribute where-clause
--
--------------------------------------------------------------------------------
--
SET SERVEROUTPUT ON SIZE 1000000;
--
DECLARE
   curr NUMBER := 0;
BEGIN
   -- Find the current maximum value of the attribute (optionally filtered
   -- by the WHERE clause passed as the fourth argument)
   SELECT max(&3) INTO curr FROM &2 &4;
   curr := curr + 1;
   -- Recreate the sequence so it resumes just past that maximum
   -- (NVL covers the empty-table case, where max() returns NULL)
   EXECUTE IMMEDIATE 'DROP SEQUENCE &1';
   EXECUTE IMMEDIATE 'CREATE SEQUENCE &1 START WITH ' || NVL(curr,1);
END;
/
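--
-- For illustration, the handle_seq call in update-sequences.sql substitutes
-- &1..&4 above, so the SELECT expands to (a sketch of the expansion):
--
--   SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999'))
--     INTO curr
--     FROM handle
--     WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$');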

View File

@@ -1,33 +0,0 @@
# DSpace Database Now Upgrades Automatically
AS OF DSPACE 5, the DSpace database now upgrades itself AUTOMATICALLY.
Therefore, all `database_schema*.sql` files have been removed. Starting
with the DSpace 4.x -> 5.0 upgrade, you no longer need to manually run any
SQL scripts to upgrade your database.
Please see the [5.0 Upgrade Instructions](https://wiki.duraspace.org/display/DSDOC5x/Upgrading+DSpace)
for more information on upgrading to DSpace 5.
## More info on automatic database upgrades
As of DSpace 5.0, we now use [Flyway DB](http://flywaydb.org/) along with the
SQL scripts embedded in the `dspace-api.jar` to automatically keep your DSpace
database up-to-date. These scripts are now located in the source code at:
`[dspace-src]/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres`
As Flyway automates the upgrade process, you should NEVER run these SQL scripts
manually. For more information, please see the `README.md` in the scripts directory.
## Using the update-sequences.sql script
The `update-sequences.sql` script in this directory may still be used to update
your internal database counts if you feel they have gotten out of "sync". This
may sometimes occur after large restores of content (e.g. when using the DSpace
[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore)
feature).
This `update-sequences.sql` script can be run manually. It will not harm your
database (or its contents) in any way. It just ensures all database counts (i.e.
sequences) are properly set to the next available value.
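A minimal sketch of a manual run (assuming a PostgreSQL-backed install; the
user, database name, and working directory are illustrative, not documented
here):

    psql -U dspace -d dspace -f update-sequences.sql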

View File

@@ -569,16 +569,6 @@ Common usage:
<fileset dir="lib" />
</copy>
<move todir="${dspace.dir}/etc.bak-${build.date}" failonerror="no">
<fileset dir="${dspace.dir}/etc">
<include name="**/*" />
</fileset>
</move>
<copy todir="${dspace.dir}/etc" preservelastmodified="true">
<fileset dir="etc" />
</copy>
<copy todir="${dspace.dir}/solr" preservelastmodified="true">
<fileset dir="solr"/>
</copy>
@@ -593,10 +583,6 @@ Common usage:
${dspace.dir}/lib.bak-${build.date}
${dspace.dir}/etc was backed up to
${dspace.dir}/etc.bak-${build.date}
Please review these directories and delete if no longer needed.
====================================================================
</echo>
@@ -728,8 +714,6 @@ Common usage:
<mkdir dir="${dspace.dir}/lib" />
<mkdir dir="${dspace.dir}/etc" />
<mkdir dir="${dspace.dir}/webapps" />
<mkdir dir="${dspace.dir}/triplestore" />
@@ -821,17 +805,6 @@ Common usage:
<fileset dir="lib" />
</copy>
<!-- NB: no regular use is made of etc/ files in an installed
- system, so this step is 'deprecated', and will eventually
- be removed. -->
<delete failonerror="no">
<fileset dir="${dspace.dir}/etc" includes="**/*" />
</delete>
<copy todir="${dspace.dir}/etc" preservelastmodified="true">
<fileset dir="etc" />
</copy>
<copy todir="${dspace.dir}/solr" preservelastmodified="true">
<fileset dir="solr" />
</copy>

View File

@@ -1,6 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd">
<!--
The contents of this file are subject to the license and copyright
@@ -15,7 +13,11 @@
This file overrides the solr localhost restriction.
-->
<web-app>
<web-app version='2.5'
xmlns='http://java.sun.com/xml/ns/javaee'
xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
xsi:schemaLocation='http://java.sun.com/xml/ns/javaee
http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd'>
<!-- Uncomment if you are trying to use a Resin version before 3.0.19.
Their XML implementation isn't entirely compatible with Xerces.
@@ -28,32 +30,35 @@
"com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl"/>
-->
<!-- People who want to hardcode their "Solr Home" directly into the
WAR File can set the JNDI property here...
-->
<!--
<env-entry>
<env-entry-name>solr/home</env-entry-name>
<env-entry-value>/put/your/solr/home/here</env-entry-value>
<env-entry-type>java.lang.String</env-entry-type>
</env-entry>
-->
<env-entry>
<description>Solr home: configuration, cores etc.</description>
<env-entry-name>solr/home</env-entry-name>
<env-entry-value>${dspace.dir}/solr</env-entry-value>
<env-entry-type>java.lang.String</env-entry-type>
</env-entry>
<!-- Tell Solr where its log4j configuration is located -->
<!-- NOTE: Solr cannot use the default DSpace log4j configuration as it isn't
initialized until the DSpace Kernel starts up, and we don't want Solr to
depend on the DSpace Kernel/API -->
<!-- NOTE: Solr cannot use the default DSpace log4j configuration as it
isn't initialized until the DSpace Kernel starts up, and we don't want
Solr to depend on the DSpace Kernel/API -->
<context-param>
<param-name>log4j.configuration</param-name>
<param-value>${dspace.dir}/config/log4j-solr.properties</param-value>
<description>URL locating a Log4J configuration file (properties or XML).</description>
<description>
URL locating a Log4J configuration file (properties or XML).
</description>
<param-name>log4jConfiguration</param-name>
<param-value>${dspace.dir}/config/log4j-solr.xml</param-value>
</context-param>
<listener>
<listener-class>org.apache.logging.log4j.web.Log4jServletContextListener</listener-class>
</listener>
<filter>
<description>Activate logging</description>
<filter-name>log4jServletFilter</filter-name>
<filter-class>org.apache.logging.log4j.web.Log4jServletFilter</filter-class>
</filter>
<!-- Any path (name) registered in solrconfig.xml will be sent to that filter -->
<filter>
<filter-name>LocalHostRestrictionFilter</filter-name>
@@ -87,6 +92,15 @@
-->
</filter>
<filter-mapping>
<filter-name>log4jServletFilter</filter-name>
<url-pattern>/*</url-pattern>
<dispatcher>REQUEST</dispatcher>
<dispatcher>FORWARD</dispatcher>
<dispatcher>INCLUDE</dispatcher>
<dispatcher>ERROR</dispatcher>
</filter-mapping>
<!--
<filter-mapping>
<filter-name>LocalHostRestrictionFilter</filter-name>
@@ -111,10 +125,6 @@
<!-- Otherwise it will continue to the old servlets -->
<listener>
<listener-class>org.dspace.solr.filters.ConfigureLog4jListener</listener-class>
</listener>
<servlet>
<servlet-name>Zookeeper</servlet-name>
<servlet-class>org.apache.solr.servlet.ZookeeperInfoServlet</servlet-class>