Merge branch 'master' of https://github.com/DSpace/DSpace into mydspace_clean

This commit is contained in:
Andrea Bollini
2019-03-28 13:15:21 +01:00
143 changed files with 4418 additions and 8671 deletions

View File

@@ -26,19 +26,20 @@ before_install:
# Skip install stage, as we'll do it below # Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'" install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
# Two stage Build and Test # Build DSpace and run both Unit and Integration Tests
# 1. Install & Unit Test APIs
# 2. Assemble DSpace
script: script:
# 1. [Install & Unit Test] Check source code licenses and run source code Unit Tests # Summary of flags used (below):
# license:check => Validate all source code license headers # license:check => Validate all source code license headers
# -Dmaven.test.skip=false => Enable DSpace Unit Tests # -Dmaven.test.skip=false => Enable DSpace Unit Tests
# -DskipITs=false => Enable DSpace Integration Tests # -DskipITs=false => Enable DSpace Integration Tests
# -P !assembly => Skip normal assembly (as it can be memory intensive) # -P !assembly => Skip assembly of "dspace-installer" directory (as it can be memory intensive)
# -B => Maven batch/non-interactive mode (recommended for CI) # -B => Maven batch/non-interactive mode (recommended for CI)
# -V => Display Maven version info before build # -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries # -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2" - "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive) # After a successful build and test (see 'script'), send code coverage reports to coveralls.io
- "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2" # These code coverage reports are generated by jacoco-maven-plugin (during test process above).
after_success:
# Run "verify", enabling the "coveralls" profile. This sends our reports to coveralls.io (see coveralls-maven-plugin)
- "cd dspace && mvn verify -P coveralls"

View File

@@ -15,7 +15,8 @@ WORKDIR /app
# The dspace-install directory will be written to /install # The dspace-install directory will be written to /install
RUN mkdir /install \ RUN mkdir /install \
&& chown -Rv dspace: /install && chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace USER dspace

View File

@@ -15,7 +15,8 @@ WORKDIR /app
# The dspace-install directory will be written to /install # The dspace-install directory will be written to /install
RUN mkdir /install \ RUN mkdir /install \
&& chown -Rv dspace: /install && chown -Rv dspace: /install \
&& chown -Rv dspace: /app
USER dspace USER dspace

View File

@@ -587,7 +587,7 @@
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId> <artifactId>solr-solrj</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>
@@ -622,7 +622,7 @@
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId> <artifactId>lucene-core</artifactId>
<version>4.10.4</version> <version>${solr.client.version}</version>
</dependency> </dependency>
<dependency> <dependency>

View File

@@ -121,20 +121,24 @@ public class IndexVersion {
} }
// Open this index directory in Lucene // Open this index directory in Lucene
Directory indexDir = FSDirectory.open(dir); Directory indexDir = FSDirectory.open(dir.toPath());
// Get info on the Lucene segment file(s) in index directory // Get info on the Lucene segment file(s) in index directory
SegmentInfos sis = new SegmentInfos(); SegmentInfos sis;
try { try {
sis.read(indexDir); sis = SegmentInfos.readLatestCommit(indexDir);
} catch (IOException ie) { } catch (IOException ie) {
// Wrap default IOException, providing more info about which directory cannot be read // Wrap default IOException, providing more info about which directory cannot be read
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath(), ie); throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath(), ie);
} }
if (null == sis) {
throw new IOException("Could not read Lucene segments files in " + dir.getAbsolutePath());
}
// If we have a valid Solr index dir, but it has no existing segments // If we have a valid Solr index dir, but it has no existing segments
// then just return an empty string. It's a valid but empty index. // then just return an empty string. It's a valid but empty index.
if (sis != null && sis.size() == 0) { if (sis.size() == 0) {
return ""; return "";
} }

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.authority; package org.dspace.authority;
import java.io.IOException;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.util.List; import java.util.List;
@@ -22,7 +23,8 @@ import org.apache.solr.client.solrj.response.QueryResponse;
*/ */
public interface AuthoritySearchService { public interface AuthoritySearchService {
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException; public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException;
public List<String> getAllIndexedMetadataFields() throws Exception; public List<String> getAllIndexedMetadataFields() throws Exception;

View File

@@ -15,7 +15,7 @@ import java.util.List;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
@@ -39,16 +39,17 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
/** /**
* Non-Static CommonsHttpSolrServer for processing indexing events. * Non-Static CommonsHttpSolrServer for processing indexing events.
*/ */
protected HttpSolrServer solr = null; protected HttpSolrClient solr = null;
protected HttpSolrServer getSolr() throws MalformedURLException, SolrServerException { protected HttpSolrClient getSolr()
throws MalformedURLException, SolrServerException, IOException {
if (solr == null) { if (solr == null) {
String solrService = ConfigurationManager.getProperty("solr.authority.server"); String solrService = ConfigurationManager.getProperty("solr.authority.server");
log.debug("Solr authority URL: " + solrService); log.debug("Solr authority URL: " + solrService);
solr = new HttpSolrServer(solrService); solr = new HttpSolrClient.Builder(solrService).build();
solr.setBaseURL(solrService); solr.setBaseURL(solrService);
SolrQuery solrQuery = new SolrQuery().setQuery("*:*"); SolrQuery solrQuery = new SolrQuery().setQuery("*:*");
@@ -129,7 +130,8 @@ public class AuthoritySolrServiceImpl implements AuthorityIndexingService, Autho
} }
@Override @Override
public QueryResponse search(SolrQuery query) throws SolrServerException, MalformedURLException { public QueryResponse search(SolrQuery query)
throws SolrServerException, MalformedURLException, IOException {
return getSolr().query(query); return getSolr().query(query);
} }

View File

@@ -462,13 +462,6 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
return bitstreamDAO.getNotReferencedBitstreams(context); return bitstreamDAO.getNotReferencedBitstreams(context);
} }
@Override
public void addMetadata(Context context, Bitstream dso, MetadataField metadataField, String lang,
List<String> values, List<String> authorities, List<Integer> confidences)
throws SQLException {
addMetadata(context, dso, metadataField, lang, values, authorities, confidences, null);
}
public Long getLastModified(Bitstream bitstream) { public Long getLastModified(Bitstream bitstream) {
return bitstreamStorageService.getLastModified(bitstream); return bitstreamStorageService.getLastModified(bitstream);
} }

View File

@@ -372,7 +372,7 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
log.error(LogManager.getHeader(context, "setWorkflowGroup", log.error(LogManager.getHeader(context, "setWorkflowGroup",
"collection_id=" + collection.getID() + " " + e.getMessage()), e); "collection_id=" + collection.getID() + " " + e.getMessage()), e);
} }
if (!StringUtils.equals("default", workflow.getID())) { if (!StringUtils.equals(XmlWorkflowFactory.LEGACY_WORKFLOW_NAME, workflow.getID())) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"setWorkflowGroup can be used only on collection with the default basic dspace workflow. " "setWorkflowGroup can be used only on collection with the default basic dspace workflow. "
+ "Instead, the collection: " + "Instead, the collection: "

View File

@@ -231,7 +231,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
@Override @Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values, public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
List<String> authorities, List<Integer> confidences, List<Integer> places) List<String> authorities, List<Integer> confidences)
throws SQLException { throws SQLException {
boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField); boolean authorityControlled = metadataAuthorityService.isAuthorityControlled(metadataField);
boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField); boolean authorityRequired = metadataAuthorityService.isAuthorityRequired(metadataField);
@@ -703,15 +703,4 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
} }
} }
@Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
List<String> authorities, List<Integer> confidences) throws SQLException {
addMetadata(context, dso, metadataField, lang, values, authorities, confidences, null);
}
@Override
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
String authority, int confidence, int place) throws SQLException {
addMetadata(context, dso, metadataField, language, value, authority, confidence);
}
} }

View File

@@ -10,7 +10,6 @@ package org.dspace.content;
import java.io.Serializable; import java.io.Serializable;
import java.sql.SQLException; import java.sql.SQLException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
@@ -29,11 +28,6 @@ public interface InProgressSubmission<ID extends Serializable> extends Indexable
*/ */
ID getID(); ID getID();
/**
* Update the submission, including the unarchived item.
*/
void update() throws SQLException, AuthorizeException;
/** /**
* Get the incomplete item object * Get the incomplete item object
* *

View File

@@ -27,9 +27,7 @@ import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
@@ -245,20 +243,6 @@ public class WorkspaceItem
supervisorGroups.add(group); supervisorGroups.add(group);
} }
@Override
public void update() throws SQLException, AuthorizeException {
Context context = null;
try {
context = new Context();
ContentServiceFactory.getInstance().getWorkspaceItemService().update(context, this);
} finally {
if (context != null && context.isValid()) {
context.abort();
}
}
}
@Override @Override
public int getType() { public int getType() {
return Constants.WORKSPACEITEM; return Constants.WORKSPACEITEM;

View File

@@ -89,9 +89,9 @@ public class SolrAuthority implements ChoiceAuthority {
String localSortField = ""; String localSortField = "";
if (StringUtils.isNotBlank(locale)) { if (StringUtils.isNotBlank(locale)) {
localSortField = sortField + "_" + locale; localSortField = sortField + "_" + locale;
queryArgs.setSortField(localSortField, SolrQuery.ORDER.asc); queryArgs.addSort(localSortField, SolrQuery.ORDER.asc);
} else { } else {
queryArgs.setSortField(sortField, SolrQuery.ORDER.asc); queryArgs.addSort(sortField, SolrQuery.ORDER.asc);
} }
Choices result; Choices result;
@@ -100,14 +100,14 @@ public class SolrAuthority implements ChoiceAuthority {
boolean hasMore = false; boolean hasMore = false;
QueryResponse searchResponse = getSearchService().search(queryArgs); QueryResponse searchResponse = getSearchService().search(queryArgs);
SolrDocumentList authDocs = searchResponse.getResults(); SolrDocumentList authDocs = searchResponse.getResults();
ArrayList<Choice> choices = new ArrayList<Choice>(); ArrayList<Choice> choices = new ArrayList<>();
if (authDocs != null) { if (authDocs != null) {
max = (int) searchResponse.getResults().getNumFound(); max = (int) searchResponse.getResults().getNumFound();
int maxDocs = authDocs.size(); int maxDocs = authDocs.size();
if (limit < maxDocs) { if (limit < maxDocs) {
maxDocs = limit; maxDocs = limit;
} }
List<AuthorityValue> alreadyPresent = new ArrayList<AuthorityValue>(); List<AuthorityValue> alreadyPresent = new ArrayList<>();
for (int i = 0; i < maxDocs; i++) { for (int i = 0; i < maxDocs; i++) {
SolrDocument solrDocument = authDocs.get(i); SolrDocument solrDocument = authDocs.get(i);
if (solrDocument != null) { if (solrDocument != null) {

View File

@@ -239,26 +239,6 @@ public interface DSpaceObjectService<T extends DSpaceObject> extends FindableObj
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values, public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
List<String> authorities, List<Integer> confidences) throws SQLException; List<String> authorities, List<Integer> confidences) throws SQLException;
/**
* Add metadata fields. These are appended to existing values.
* Use <code>clearDC</code> to remove values. The values are insert in the
* positions passed in the places argument.
*
* @param context DSpace context
* @param dso DSpaceObject
* @param metadataField the metadata field to which the value is to be set
* @param lang the ISO639 language code, optionally followed by an underscore
* and the ISO3166 country code. <code>null</code> means the
* value has no language (for example, a date).
* @param values the values to add.
* @param authorities the external authority key for this value (or null)
* @param confidences the authority confidence (default 0)
* @param places the places to use for the supplied values
* @throws SQLException if database error
*/
public void addMetadata(Context context, T dso, MetadataField metadataField, String lang, List<String> values,
List<String> authorities, List<Integer> confidences, List<Integer> places) throws SQLException;
/** /**
* Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single * Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List)} when a single
* value need to be added * value need to be added
@@ -275,23 +255,6 @@ public interface DSpaceObjectService<T extends DSpaceObject> extends FindableObj
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value, public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
String authority, int confidence) throws SQLException; String authority, int confidence) throws SQLException;
/**
* Shortcut for {@link #addMetadata(Context, DSpaceObject, MetadataField, String, List, List, List, List)} when a
* single value need to be added
*
* @param context
* @param dso
* @param metadataField
* @param language
* @param value
* @param authority
* @param confidence
* @param place
* @throws SQLException
*/
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value,
String authority, int confidence, int place) throws SQLException;
public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value) public void addMetadata(Context context, T dso, MetadataField metadataField, String language, String value)
throws SQLException; throws SQLException;

View File

@@ -66,20 +66,20 @@ public class Constants {
public static final int WORKFLOWITEM = 9; public static final int WORKFLOWITEM = 9;
/** /**
* Type of pool task workflow objects * Type of pool task objects
*/ */
public static final int WORKFLOW_POOL = 10; public static final int POOLTASK = 10;
/** /**
* Type of pool task workflow objects * Type of claimed task objects
*/ */
public static final int WORKFLOW_CLAIMED = 11; public static final int CLAIMEDTASK = 11;
/** /**
* lets you look up type names from the type IDs * lets you look up type names from the type IDs
*/ */
public static final String[] typeText = { "BITSTREAM", "BUNDLE", "ITEM", "COLLECTION", "COMMUNITY", "SITE", "GROUP", public static final String[] typeText = { "BITSTREAM", "BUNDLE", "ITEM", "COLLECTION", "COMMUNITY", "SITE", "GROUP",
"EPERSON", "WORKSPACEITEM", "WORKFLOWITEM", "WORKFLOW POOL", "WORKFLOW CLAIMED" }; "EPERSON", "WORKSPACEITEM", "WORKFLOWITEM", "POOLTASK", "CLAIMEDTASK" };
/** /**
* Special Bundle and Bitstream Names: * Special Bundle and Bitstream Names:

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.ctask.test;
import java.io.IOException;
import org.dspace.content.DSpaceObject;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Curation task which simply reports its invocation without changing anything.
* Meant for testing.
*
* @author mhwood
*/
public class WorkflowReportTest
extends AbstractCurationTask {
private static final Logger LOG = LoggerFactory.getLogger(WorkflowReportTest.class);
@Override
public int perform(DSpaceObject dso)
throws IOException {
LOG.info("Class {} as task {} received 'perform' for object {}",
WorkflowReportTest.class.getSimpleName(), taskId, dso);
curator.report(String.format(
"Class %s as task %s received 'perform' for object %s%n",
WorkflowReportTest.class.getSimpleName(), taskId, dso));
return Curator.CURATE_SUCCESS;
}
}

View File

@@ -9,6 +9,10 @@ package org.dspace.curate;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.FileReader; import java.io.FileReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import java.util.Map;
@@ -18,6 +22,7 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.output.NullOutputStream;
import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory; import org.dspace.core.factory.CoreServiceFactory;
@@ -57,7 +62,9 @@ public class CurationCli {
options.addOption("e", "eperson", true, options.addOption("e", "eperson", true,
"email address of curating eperson"); "email address of curating eperson");
options.addOption("r", "reporter", true, options.addOption("r", "reporter", true,
"reporter to manage results - use '-' to report to console. If absent, no reporting"); "relative or absolute path to the desired report file. "
+ "Use '-' to report to console. "
+ "If absent, no reporting");
options.addOption("s", "scope", true, options.addOption("s", "scope", true,
"transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " + "transaction scope to impose: use 'object', 'curation', or 'open'. If absent, 'open' " +
"applies"); "applies");
@@ -165,9 +172,17 @@ public class CurationCli {
} }
Curator curator = new Curator(); Curator curator = new Curator();
if (reporterName != null) { OutputStream reporter;
curator.setReporter(reporterName); if (null == reporterName) {
reporter = new NullOutputStream();
} else if ("-".equals(reporterName)) {
reporter = System.out;
} else {
reporter = new PrintStream(reporterName);
} }
Writer reportWriter = new OutputStreamWriter(reporter);
curator.setReporter(reportWriter);
if (scope != null) { if (scope != null) {
Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase()); Curator.TxScope txScope = Curator.TxScope.valueOf(scope.toUpperCase());
curator.setTransactionScope(txScope); curator.setTransactionScope(txScope);

View File

@@ -15,6 +15,7 @@ import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
@@ -69,16 +70,12 @@ public class Curator {
INTERACTIVE, BATCH, ANY INTERACTIVE, BATCH, ANY
} }
;
// transaction scopes // transaction scopes
public static enum TxScope { public static enum TxScope {
OBJECT, CURATION, OPEN OBJECT, CURATION, OPEN
} }
; private static final Logger log = LogManager.getLogger();
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(Curator.class);
protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<>(); protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<>();
@@ -86,7 +83,7 @@ public class Curator {
protected Map<String, TaskRunner> trMap = new HashMap<>(); protected Map<String, TaskRunner> trMap = new HashMap<>();
protected List<String> perfList = new ArrayList<>(); protected List<String> perfList = new ArrayList<>();
protected TaskQueue taskQ = null; protected TaskQueue taskQ = null;
protected String reporter = null; protected Appendable reporter = null;
protected Invoked iMode = null; protected Invoked iMode = null;
protected TaskResolver resolver = new TaskResolver(); protected TaskResolver resolver = new TaskResolver();
protected TxScope txScope = TxScope.OPEN; protected TxScope txScope = TxScope.OPEN;
@@ -193,7 +190,7 @@ public class Curator {
* causes reporting to standard out. * causes reporting to standard out.
* @return return self (Curator instance) with reporter set * @return return self (Curator instance) with reporter set
*/ */
public Curator setReporter(String reporter) { public Curator setReporter(Appendable reporter) {
this.reporter = reporter; this.reporter = reporter;
return this; return this;
} }
@@ -346,9 +343,10 @@ public class Curator {
* @param message the message to output to the reporting stream. * @param message the message to output to the reporting stream.
*/ */
public void report(String message) { public void report(String message) {
// Stub for now try {
if ("-".equals(reporter)) { reporter.append(message);
System.out.println(message); } catch (IOException ex) {
log.error("Task reporting failure", ex);
} }
} }

View File

@@ -0,0 +1,88 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import org.dspace.services.ConfigurationService;
import org.dspace.utils.DSpace;
/**
* Save a curation report to a unique file in the reports directory.
* Reports are named by the date and time of day, for example:
* "curation-20180916T113903045.report".
*
* @author mhwood
*/
public class FileReporter
implements Reporter {
private final Writer writer;
/**
* Open a writer to a file in a directory named by the configuration
* property {@code report.dir}, or in {@code [DSpace]/reports} if not
* configured.
*
* @throws IOException if there is a problem with the file path.
*/
public FileReporter()
throws IOException {
// Calculate a unique(?) file name.
Date now = GregorianCalendar.getInstance().getTime();
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd'T'hhmmssSSS");
String filename = String.format("curation-%s.report", sdf.format(now));
// Build a path to the directory which is to receive the file.
ConfigurationService cfg = new DSpace().getConfigurationService();
String reportDir = cfg.getProperty("report.dir");
Path reportPath;
if (null == reportDir) {
reportPath = Paths.get(cfg.getProperty("dspace.dir"),
"reports",
filename);
} else {
reportPath = Paths.get(reportDir, filename);
}
// Open the file.
writer = new FileWriter(reportPath.toFile());
}
@Override
public Appendable append(CharSequence cs)
throws IOException {
writer.append(cs);
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
writer.append(cs, i, i1);
return this;
}
@Override
public Appendable append(char c) throws IOException {
writer.append(c);
return this;
}
@Override
public void close() throws Exception {
writer.close();
}
}

View File

@@ -0,0 +1,62 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Write curation report records through the logging framework.
* Whole lines (strings ending in '\n') are written to the log category "curation".
* Any partial line is flushed when the reporter is {@code close()}d.
*
* @author mhwood
*/
public class LogReporter
implements Reporter {
private static final Logger LOG = LoggerFactory.getLogger("curation");
private final StringBuilder buffer = new StringBuilder();
@Override
public Appendable append(CharSequence cs)
throws IOException {
for (int pos = 0; pos < cs.length(); pos++) {
char c = cs.charAt(pos);
if (c == '\n') {
LOG.info(buffer.toString());
buffer.delete(0, buffer.length()); // Clear the buffer
} else {
buffer.append(c);
}
}
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
return append(cs.subSequence(i, i1));
}
@Override
public Appendable append(char c)
throws IOException {
return append(String.valueOf(c));
}
@Override
public void close()
throws Exception {
if (buffer.length() > 0) {
LOG.info(buffer.toString());
}
}
}

View File

@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
/**
* A marker interface needed to make curation reporter classes into plugins.
*
* @author mhwood
*/
public interface Reporter
extends Appendable, AutoCloseable {
}

View File

@@ -30,6 +30,8 @@ import org.dspace.content.Item;
import org.dspace.content.service.CollectionService; import org.dspace.content.service.CollectionService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.core.LogManager; import org.dspace.core.LogManager;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.core.service.PluginService;
import org.dspace.curate.service.WorkflowCuratorService; import org.dspace.curate.service.WorkflowCuratorService;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
@@ -56,9 +58,10 @@ import org.springframework.beans.factory.annotation.Autowired;
public class WorkflowCuratorServiceImpl implements WorkflowCuratorService { public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
/** /**
* log4j logger * Logging category
*/ */
private Logger log = org.apache.logging.log4j.LogManager.getLogger(WorkflowCuratorServiceImpl.class); private static final Logger log
= org.apache.logging.log4j.LogManager.getLogger();
protected Map<String, TaskSet> tsMap = new HashMap<String, TaskSet>(); protected Map<String, TaskSet> tsMap = new HashMap<String, TaskSet>();
@@ -118,6 +121,7 @@ public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
Curator curator = new Curator(); Curator curator = new Curator();
// are we going to perform, or just put on queue? // are we going to perform, or just put on queue?
if (step.queue != null) { if (step.queue != null) {
// The queue runner will call setReporter
for (Task task : step.tasks) { for (Task task : step.tasks) {
curator.addTask(task.name); curator.addTask(task.name);
} }
@@ -125,7 +129,18 @@ public class WorkflowCuratorServiceImpl implements WorkflowCuratorService {
basicWorkflowItemService.update(c, wfi); basicWorkflowItemService.update(c, wfi);
return false; return false;
} else { } else {
return curate(curator, c, wfi); PluginService plugins = CoreServiceFactory.getInstance()
.getPluginService();
try (Reporter reporter
= (Reporter) plugins
.getSinglePlugin(Reporter.class);) {
curator.setReporter(reporter);
boolean status = curate(curator, c, wfi);
reporter.close();
return status;
} catch (Exception e) {
log.error("Failed to close report", e);
}
} }
} }
return true; return true;

View File

@@ -262,9 +262,10 @@ public class IndexClient {
* @param line the command line options * @param line the command line options
* @param indexer the solr indexer * @param indexer the solr indexer
* @throws SearchServiceException in case of a solr exception * @throws SearchServiceException in case of a solr exception
* @throws java.io.IOException passed through
*/ */
protected static void checkRebuildSpellCheck(CommandLine line, IndexingService indexer) protected static void checkRebuildSpellCheck(CommandLine line, IndexingService indexer)
throws SearchServiceException { throws SearchServiceException, IOException {
if (line.hasOption("s")) { if (line.hasOption("s")) {
log.info("Rebuilding spell checker."); log.info("Rebuilding spell checker.");
indexer.buildSpellCheck(); indexer.buildSpellCheck();

View File

@@ -64,5 +64,5 @@ public interface IndexingService {
void optimize() throws SearchServiceException; void optimize() throws SearchServiceException;
void buildSpellCheck() throws SearchServiceException; void buildSpellCheck() throws SearchServiceException, IOException;
} }

View File

@@ -46,11 +46,11 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.validator.routines.UrlValidator; import org.apache.commons.validator.routines.UrlValidator;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.FacetField;
@@ -203,14 +203,14 @@ public class SolrServiceImpl implements SearchService, IndexingService {
/** /**
* Non-Static SolrServer for processing indexing events. * Non-Static SolrServer for processing indexing events.
*/ */
protected SolrServer solr = null; protected SolrClient solr = null;
protected SolrServiceImpl() { protected SolrServiceImpl() {
} }
protected SolrServer getSolr() { protected SolrClient getSolr() {
if (solr == null) { if (solr == null) {
String solrService = DSpaceServicesFactory.getInstance().getConfigurationService() String solrService = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("discovery.search.server"); .getProperty("discovery.search.server");
@@ -220,7 +220,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
.getBooleanProperty("discovery", "solr.url.validation.enabled", true)) { .getBooleanProperty("discovery", "solr.url.validation.enabled", true)) {
try { try {
log.debug("Solr URL: " + solrService); log.debug("Solr URL: " + solrService);
HttpSolrServer solrServer = new HttpSolrServer(solrService); HttpSolrClient solrServer = new HttpSolrClient.Builder(solrService).build();
solrServer.setBaseURL(solrService); solrServer.setBaseURL(solrService);
solrServer.setUseMultiPartPost(true); solrServer.setUseMultiPartPost(true);
@@ -236,7 +236,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
DatabaseUtils.checkReindexDiscovery(this); DatabaseUtils.checkReindexDiscovery(this);
solr = solrServer; solr = solrServer;
} catch (SolrServerException e) { } catch (SolrServerException | IOException e) {
log.error("Error while initializing solr server", e); log.error("Error while initializing solr server", e);
} }
} else { } else {
@@ -526,7 +526,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
if (force) { if (force) {
try { try {
getSolr().deleteByQuery( getSolr().deleteByQuery(
"search.resourcetype:[" + Constants.ITEM + " TO " + Constants.WORKFLOW_CLAIMED + "]"); "search.resourcetype:[" + Constants.ITEM + " TO " + Constants.CLAIMEDTASK + "]");
} catch (Exception e) { } catch (Exception e) {
throw new SearchServiceException(e.getMessage(), e); throw new SearchServiceException(e.getMessage(), e);
} }
@@ -535,8 +535,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
cleanIndex(false, Constants.COLLECTION); cleanIndex(false, Constants.COLLECTION);
cleanIndex(false, Constants.COMMUNITY); cleanIndex(false, Constants.COMMUNITY);
cleanIndex(false, Constants.WORKSPACEITEM); cleanIndex(false, Constants.WORKSPACEITEM);
cleanIndex(false, Constants.WORKFLOW_POOL); cleanIndex(false, Constants.POOLTASK);
cleanIndex(false, Constants.WORKFLOW_CLAIMED); cleanIndex(false, Constants.CLAIMEDTASK);
cleanIndex(false, Constants.WORKFLOWITEM); cleanIndex(false, Constants.WORKFLOWITEM);
} }
} }
@@ -613,7 +613,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} }
@Override @Override
public void buildSpellCheck() throws SearchServiceException { public void buildSpellCheck()
throws SearchServiceException, IOException {
try { try {
if (getSolr() == null) { if (getSolr() == null) {
return; return;
@@ -1636,7 +1637,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
if (claimedTasks != null) { if (claimedTasks != null) {
for (ClaimedTask claimedTask : claimedTasks) { for (ClaimedTask claimedTask : claimedTasks) {
SolrInputDocument claimDoc = doc.deepCopy(); SolrInputDocument claimDoc = doc.deepCopy();
addBasicInfoToDocument(claimDoc, Constants.WORKFLOW_CLAIMED, claimedTask.getID(), null, locations); addBasicInfoToDocument(claimDoc, Constants.CLAIMEDTASK, claimedTask.getID(), null, locations);
addFacetIndex(claimDoc, "action", claimedTask.getActionID(), claimedTask.getActionID()); addFacetIndex(claimDoc, "action", claimedTask.getActionID(), claimedTask.getActionID());
addFacetIndex(claimDoc, "step", claimedTask.getStepID(), claimedTask.getStepID()); addFacetIndex(claimDoc, "step", claimedTask.getStepID(), claimedTask.getStepID());
@@ -1657,7 +1658,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
if (pools != null) { if (pools != null) {
for (PoolTask poolTask : pools) { for (PoolTask poolTask : pools) {
SolrInputDocument claimDoc = doc.deepCopy(); SolrInputDocument claimDoc = doc.deepCopy();
addBasicInfoToDocument(claimDoc, Constants.WORKFLOW_POOL, poolTask.getID(), null, locations); addBasicInfoToDocument(claimDoc, Constants.POOLTASK, poolTask.getID(), null, locations);
addFacetIndex(claimDoc, "action", poolTask.getActionID(), poolTask.getActionID()); addFacetIndex(claimDoc, "action", poolTask.getActionID(), poolTask.getActionID());
addFacetIndex(claimDoc, "step", poolTask.getStepID(), poolTask.getStepID()); addFacetIndex(claimDoc, "step", poolTask.getStepID(), poolTask.getStepID());
@@ -1939,7 +1940,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
order = SolrQuery.ORDER.desc; order = SolrQuery.ORDER.desc;
} }
solrQuery.addSortField(discoveryQuery.getSortField(), order); solrQuery.addSort(discoveryQuery.getSortField(), order);
} }
for (String property : discoveryQuery.getProperties().keySet()) { for (String property : discoveryQuery.getProperties().keySet()) {
@@ -2219,8 +2220,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
switch (type) { switch (type) {
case Constants.WORKSPACEITEM: case Constants.WORKSPACEITEM:
case Constants.WORKFLOWITEM: case Constants.WORKFLOWITEM:
case Constants.WORKFLOW_POOL: case Constants.POOLTASK:
case Constants.WORKFLOW_CLAIMED: case Constants.CLAIMEDTASK:
uid = Integer.parseInt((String) id); uid = Integer.parseInt((String) id);
break; break;
default: default:
@@ -2261,7 +2262,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQuery.setStart(offset); solrQuery.setStart(offset);
solrQuery.setRows(max); solrQuery.setRows(max);
if (orderfield != null) { if (orderfield != null) {
solrQuery.setSortField(orderfield, ascending ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc); solrQuery.addSort(orderfield, ascending ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc);
} }
if (filterquery != null) { if (filterquery != null) {
solrQuery.addFilterQuery(filterquery); solrQuery.addFilterQuery(filterquery);

View File

@@ -45,10 +45,10 @@ import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.http.HttpResponse; import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.request.CoreAdminRequest;
@@ -107,7 +107,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
org.apache.logging.log4j.LogManager.getLogger(SolrLoggerServiceImpl.class); org.apache.logging.log4j.LogManager.getLogger(SolrLoggerServiceImpl.class);
private static final String MULTIPLE_VALUES_SPLITTER = "|"; private static final String MULTIPLE_VALUES_SPLITTER = "|";
protected SolrServer solr; protected SolrClient solr;
public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
@@ -115,7 +115,9 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
protected DatabaseReader locationService; protected DatabaseReader locationService;
private static List<String> statisticYearCores = new ArrayList<String>(); protected boolean useProxies;
private static final List<String> statisticYearCores = new ArrayList<>();
private static boolean statisticYearCoresInit = false; private static boolean statisticYearCoresInit = false;
@Autowired(required = true) @Autowired(required = true)
@@ -154,11 +156,11 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
log.info("solr-statistics.server:" + configurationService.getProperty("solr-statistics.server")); log.info("solr-statistics.server:" + configurationService.getProperty("solr-statistics.server"));
log.info("usage-statistics.dbfile:" + configurationService.getProperty("usage-statistics.dbfile")); log.info("usage-statistics.dbfile:" + configurationService.getProperty("usage-statistics.dbfile"));
HttpSolrServer server = null; HttpSolrClient server = null;
if (configurationService.getProperty("solr-statistics.server") != null) { if (configurationService.getProperty("solr-statistics.server") != null) {
try { try {
server = new HttpSolrServer(configurationService.getProperty("solr-statistics.server")); server = new HttpSolrClient.Builder(configurationService.getProperty("solr-statistics.server")).build();
} catch (Exception e) { } catch (Exception e) {
log.error("Error accessing Solr server configured in 'solr-statistics.server'", e); log.error("Error accessing Solr server configured in 'solr-statistics.server'", e);
} }
@@ -346,14 +348,14 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
if (dspaceObject != null) { if (dspaceObject != null) {
doc1.addField("id", dspaceObject.getID()); doc1.addField("id", dspaceObject.getID().toString());
doc1.addField("type", dspaceObject.getType()); doc1.addField("type", dspaceObject.getType());
storeParents(doc1, dspaceObject); storeParents(doc1, dspaceObject);
} }
// Save the current time // Save the current time
doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601)); doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601));
if (currentUser != null) { if (currentUser != null) {
doc1.addField("epersonid", currentUser.getID()); doc1.addField("epersonid", currentUser.getID().toString());
} }
return doc1; return doc1;
@@ -416,14 +418,14 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
if (dspaceObject != null) { if (dspaceObject != null) {
doc1.addField("id", dspaceObject.getID()); doc1.addField("id", dspaceObject.getID().toString());
doc1.addField("type", dspaceObject.getType()); doc1.addField("type", dspaceObject.getType());
storeParents(doc1, dspaceObject); storeParents(doc1, dspaceObject);
} }
// Save the current time // Save the current time
doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601)); doc1.addField("time", DateFormatUtils.format(new Date(), DATE_FORMAT_8601));
if (currentUser != null) { if (currentUser != null) {
doc1.addField("epersonid", currentUser.getID()); doc1.addField("epersonid", currentUser.getID().toString());
} }
return doc1; return doc1;
@@ -452,7 +454,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
//Store the scope //Store the scope
if (scope != null) { if (scope != null) {
solrDoc.addField("scopeId", scope.getID()); solrDoc.addField("scopeId", scope.getID().toString());
solrDoc.addField("scopeType", scope.getType()); solrDoc.addField("scopeType", scope.getType());
} }
@@ -487,7 +489,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null); SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null);
//Log the current collection & the scope ! //Log the current collection & the scope !
solrDoc.addField("owningColl", usageWorkflowEvent.getScope().getID()); solrDoc.addField("owningColl", usageWorkflowEvent.getScope().getID().toString());
storeParents(solrDoc, usageWorkflowEvent.getScope()); storeParents(solrDoc, usageWorkflowEvent.getScope());
if (usageWorkflowEvent.getWorkflowStep() != null) { if (usageWorkflowEvent.getWorkflowStep() != null) {
@@ -499,25 +501,25 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
if (usageWorkflowEvent.getGroupOwners() != null) { if (usageWorkflowEvent.getGroupOwners() != null) {
for (int i = 0; i < usageWorkflowEvent.getGroupOwners().length; i++) { for (int i = 0; i < usageWorkflowEvent.getGroupOwners().length; i++) {
Group group = usageWorkflowEvent.getGroupOwners()[i]; Group group = usageWorkflowEvent.getGroupOwners()[i];
solrDoc.addField("owner", "g" + group.getID()); solrDoc.addField("owner", "g" + group.getID().toString());
} }
} }
if (usageWorkflowEvent.getEpersonOwners() != null) { if (usageWorkflowEvent.getEpersonOwners() != null) {
for (int i = 0; i < usageWorkflowEvent.getEpersonOwners().length; i++) { for (int i = 0; i < usageWorkflowEvent.getEpersonOwners().length; i++) {
EPerson ePerson = usageWorkflowEvent.getEpersonOwners()[i]; EPerson ePerson = usageWorkflowEvent.getEpersonOwners()[i];
solrDoc.addField("owner", "e" + ePerson.getID()); solrDoc.addField("owner", "e" + ePerson.getID().toString());
} }
} }
solrDoc.addField("workflowItemId", usageWorkflowEvent.getWorkflowItem().getID()); solrDoc.addField("workflowItemId", usageWorkflowEvent.getWorkflowItem().getID().toString());
EPerson submitter = ((Item) usageWorkflowEvent.getObject()).getSubmitter(); EPerson submitter = ((Item) usageWorkflowEvent.getObject()).getSubmitter();
if (submitter != null) { if (submitter != null) {
solrDoc.addField("submitter", submitter.getID()); solrDoc.addField("submitter", submitter.getID().toString());
} }
solrDoc.addField("statistics_type", StatisticsType.WORKFLOW.text()); solrDoc.addField("statistics_type", StatisticsType.WORKFLOW.text());
if (usageWorkflowEvent.getActor() != null) { if (usageWorkflowEvent.getActor() != null) {
solrDoc.addField("actor", usageWorkflowEvent.getActor().getID()); solrDoc.addField("actor", usageWorkflowEvent.getActor().getID().toString());
} }
solr.add(solrDoc); solr.add(solrDoc);
@@ -535,21 +537,21 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
Community comm = (Community) dso; Community comm = (Community) dso;
List<Community> parentCommunities = comm.getParentCommunities(); List<Community> parentCommunities = comm.getParentCommunities();
for (Community parent : parentCommunities) { for (Community parent : parentCommunities) {
doc1.addField("owningComm", parent.getID()); doc1.addField("owningComm", parent.getID().toString());
storeParents(doc1, parent); storeParents(doc1, parent);
} }
} else if (dso instanceof Collection) { } else if (dso instanceof Collection) {
Collection coll = (Collection) dso; Collection coll = (Collection) dso;
List<Community> communities = coll.getCommunities(); List<Community> communities = coll.getCommunities();
for (Community community : communities) { for (Community community : communities) {
doc1.addField("owningComm", community.getID()); doc1.addField("owningComm", community.getID().toString());
storeParents(doc1, community); storeParents(doc1, community);
} }
} else if (dso instanceof Item) { } else if (dso instanceof Item) {
Item item = (Item) dso; Item item = (Item) dso;
List<Collection> collections = item.getCollections(); List<Collection> collections = item.getCollections();
for (Collection collection : collections) { for (Collection collection : collections) {
doc1.addField("owningColl", collection.getID()); doc1.addField("owningColl", collection.getID().toString());
storeParents(doc1, collection); storeParents(doc1, collection);
} }
} else if (dso instanceof Bitstream) { } else if (dso instanceof Bitstream) {
@@ -558,7 +560,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
for (Bundle bundle : bundles) { for (Bundle bundle : bundles) {
List<Item> items = bundle.getItems(); List<Item> items = bundle.getItems();
for (Item item : items) { for (Item item : items) {
doc1.addField("owningItem", item.getID()); doc1.addField("owningItem", item.getID().toString());
storeParents(doc1, item); storeParents(doc1, item);
} }
} }
@@ -579,12 +581,13 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public Map<String, List<String>> queryField(String query, public Map<String, List<String>> queryField(String query,
List oldFieldVals, String field) { List oldFieldVals, String field)
Map<String, List<String>> currentValsStored = new HashMap<String, List<String>>(); throws IOException {
Map<String, List<String>> currentValsStored = new HashMap<>();
try { try {
// Get one document (since all the metadata for all the values // Get one document (since all the metadata for all the values
// should be the same just get the first one we find // should be the same just get the first one we find
Map<String, String> params = new HashMap<String, String>(); Map<String, String> params = new HashMap<>();
params.put("q", query); params.put("q", query);
params.put("rows", "1"); params.put("rows", "1");
MapSolrParams solrParams = new MapSolrParams(params); MapSolrParams solrParams = new MapSolrParams(params);
@@ -602,8 +605,18 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
public class ResultProcessor { public class ResultProcessor {
private SolrInputDocument toSolrInputDocument(SolrDocument d) {
SolrInputDocument doc = new SolrInputDocument();
for (String name : d.getFieldNames()) {
doc.addField(name, d.getFieldValue(name));
}
return doc;
}
public void execute(String query) throws SolrServerException, IOException { public void execute(String query) throws SolrServerException, IOException {
Map<String, String> params = new HashMap<String, String>(); Map<String, String> params = new HashMap<>();
params.put("q", query); params.put("q", query);
params.put("rows", "10"); params.put("rows", "10");
if (0 < statisticYearCores.size()) { if (0 < statisticYearCores.size()) {
@@ -612,17 +625,23 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
MapSolrParams solrParams = new MapSolrParams(params); MapSolrParams solrParams = new MapSolrParams(params);
QueryResponse response = solr.query(solrParams); QueryResponse response = solr.query(solrParams);
long numbFound = response.getResults().getNumFound(); SolrDocumentList results = response.getResults();
long numbFound = results.getNumFound();
// process the first batch // process the first batch
process(response.getResults()); for (SolrDocument result : results) {
process(toSolrInputDocument(result));
}
// Run over the rest // Run over the rest
for (int i = 10; i < numbFound; i += 10) { for (int i = 10; i < numbFound; i += 10) {
params.put("start", String.valueOf(i)); params.put("start", String.valueOf(i));
solrParams = new MapSolrParams(params); solrParams = new MapSolrParams(params);
response = solr.query(solrParams); response = solr.query(solrParams);
process(response.getResults()); results = response.getResults();
for (SolrDocument result : results) {
process(toSolrInputDocument(result));
}
} }
} }
@@ -638,8 +657,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
*/ */
public void process(List<SolrDocument> docs) throws IOException, SolrServerException { public void process(List<SolrInputDocument> docs) throws IOException, SolrServerException {
for (SolrDocument doc : docs) { for (SolrInputDocument doc : docs) {
process(doc); process(doc);
} }
} }
@@ -651,7 +670,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
*/ */
public void process(SolrDocument doc) throws IOException, SolrServerException { public void process(SolrInputDocument doc) throws IOException, SolrServerException {
} }
@@ -667,11 +686,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
/* Result Process to alter record to be identified as a bot */ /* Result Process to alter record to be identified as a bot */
ResultProcessor processor = new ResultProcessor() { ResultProcessor processor = new ResultProcessor() {
@Override @Override
public void process(SolrDocument doc) throws IOException, SolrServerException { public void process(SolrInputDocument doc) throws IOException, SolrServerException {
doc.removeFields("isBot"); doc.removeField("isBot");
doc.addField("isBot", true); doc.addField("isBot", true);
SolrInputDocument newInput = ClientUtils.toSolrInputDocument(doc); solr.add(doc);
solr.add(newInput);
log.info("Marked " + doc.getFieldValue("ip") + " as bot"); log.info("Marked " + doc.getFieldValue("ip") + " as bot");
} }
}; };
@@ -697,11 +715,10 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
/* Result Process to alter record to be identified as a bot */ /* Result Process to alter record to be identified as a bot */
ResultProcessor processor = new ResultProcessor() { ResultProcessor processor = new ResultProcessor() {
@Override @Override
public void process(SolrDocument doc) throws IOException, SolrServerException { public void process(SolrInputDocument doc) throws IOException, SolrServerException {
doc.removeFields("isBot"); doc.removeField("isBot");
doc.addField("isBot", true); doc.addField("isBot", true);
SolrInputDocument newInput = ClientUtils.toSolrInputDocument(doc); solr.add(doc);
solr.add(newInput);
} }
}; };
@@ -748,11 +765,12 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
// QueryResponse queryResponse = solr.query()//query(query, null, -1, // QueryResponse queryResponse = solr.query()//query(query, null, -1,
// null, null, null); // null, null, null);
final List<SolrDocument> docsToUpdate = new ArrayList<SolrDocument>(); final List<SolrInputDocument> docsToUpdate = new ArrayList<>();
ResultProcessor processor = new ResultProcessor() { ResultProcessor processor = new ResultProcessor() {
@Override @Override
public void process(List<SolrDocument> docs) throws IOException, SolrServerException { public void process(List<SolrInputDocument> docs)
throws IOException, SolrServerException {
docsToUpdate.addAll(docs); docsToUpdate.addAll(docs);
} }
}; };
@@ -764,7 +782,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
// Add the new (updated onces // Add the new (updated onces
for (int i = 0; i < docsToUpdate.size(); i++) { for (int i = 0; i < docsToUpdate.size(); i++) {
SolrDocument solrDocument = docsToUpdate.get(i); SolrInputDocument solrDocument = docsToUpdate.get(i);
// Now loop over our fieldname actions // Now loop over our fieldname actions
for (int j = 0; j < fieldNames.size(); j++) { for (int j = 0; j < fieldNames.size(); j++) {
String fieldName = fieldNames.get(j); String fieldName = fieldNames.get(j);
@@ -772,7 +790,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
if (action.equals("addOne") || action.equals("replace")) { if (action.equals("addOne") || action.equals("replace")) {
if (action.equals("replace")) { if (action.equals("replace")) {
solrDocument.removeFields(fieldName); solrDocument.removeField(fieldName);
} }
for (Object fieldValue : fieldValues) { for (Object fieldValue : fieldValues) {
@@ -782,7 +800,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
// Remove the field // Remove the field
java.util.Collection<Object> values = solrDocument java.util.Collection<Object> values = solrDocument
.getFieldValues(fieldName); .getFieldValues(fieldName);
solrDocument.removeFields(fieldName); solrDocument.removeField(fieldName);
for (Object value : values) { for (Object value : values) {
// Keep all the values besides the one we need to remove // Keep all the values besides the one we need to remove
if (!fieldValues.contains((value))) { if (!fieldValues.contains((value))) {
@@ -791,9 +809,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
} }
} }
SolrInputDocument newInput = ClientUtils solr.add(solrDocument);
.toSolrInputDocument(solrDocument);
solr.add(newInput);
} }
solr.commit(); solr.commit();
// System.out.println("SolrLogger.update(\""+query+"\"):"+(new // System.out.println("SolrLogger.update(\""+query+"\"):"+(new
@@ -801,14 +817,16 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
@Override @Override
public void query(String query, int max) throws SolrServerException { public void query(String query, int max)
throws SolrServerException, IOException {
query(query, null, null, 0, max, null, null, null, null, null, false); query(query, null, null, 0, max, null, null, null, null, null, false);
} }
@Override @Override
public ObjectCount[] queryFacetField(String query, public ObjectCount[] queryFacetField(String query,
String filterQuery, String facetField, int max, boolean showTotal, String filterQuery, String facetField, int max, boolean showTotal,
List<String> facetQueries) throws SolrServerException { List<String> facetQueries)
throws SolrServerException, IOException {
QueryResponse queryResponse = query(query, filterQuery, facetField, QueryResponse queryResponse = query(query, filterQuery, facetField,
0, max, null, null, null, facetQueries, null, false); 0, max, null, null, null, facetQueries, null, false);
if (queryResponse == null) { if (queryResponse == null) {
@@ -844,7 +862,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public ObjectCount[] queryFacetDate(String query, public ObjectCount[] queryFacetDate(String query,
String filterQuery, int max, String dateType, String dateStart, String filterQuery, int max, String dateType, String dateStart,
String dateEnd, boolean showTotal, Context context) throws SolrServerException { String dateEnd, boolean showTotal, Context context)
throws SolrServerException, IOException {
QueryResponse queryResponse = query(query, filterQuery, null, 0, max, QueryResponse queryResponse = query(query, filterQuery, null, 0, max,
dateType, dateStart, dateEnd, null, null, false); dateType, dateStart, dateEnd, null, null, false);
if (queryResponse == null) { if (queryResponse == null) {
@@ -876,7 +895,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public Map<String, Integer> queryFacetQuery(String query, public Map<String, Integer> queryFacetQuery(String query,
String filterQuery, List<String> facetQueries) String filterQuery, List<String> facetQueries)
throws SolrServerException { throws SolrServerException, IOException {
QueryResponse response = query(query, filterQuery, null, 0, 1, null, null, QueryResponse response = query(query, filterQuery, null, 0, 1, null, null,
null, facetQueries, null, false); null, facetQueries, null, false);
return response.getFacetQuery(); return response.getFacetQuery();
@@ -884,7 +903,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public ObjectCount queryTotal(String query, String filterQuery) public ObjectCount queryTotal(String query, String filterQuery)
throws SolrServerException { throws SolrServerException, IOException {
QueryResponse queryResponse = query(query, filterQuery, null, 0, -1, null, QueryResponse queryResponse = query(query, filterQuery, null, 0, -1, null,
null, null, null, null, false); null, null, null, null, false);
ObjectCount objCount = new ObjectCount(); ObjectCount objCount = new ObjectCount();
@@ -942,7 +961,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
public QueryResponse query(String query, String filterQuery, public QueryResponse query(String query, String filterQuery,
String facetField, int rows, int max, String dateType, String dateStart, String facetField, int rows, int max, String dateType, String dateStart,
String dateEnd, List<String> facetQueries, String sort, boolean ascending) String dateEnd, List<String> facetQueries, String sort, boolean ascending)
throws SolrServerException { throws SolrServerException, IOException {
if (solr == null) { if (solr == null) {
return null; return null;
} }
@@ -1001,7 +1020,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
if (sort != null) { if (sort != null) {
solrQuery.setSortField(sort, (ascending ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc)); solrQuery.addSort(sort, (ascending ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc));
} }
String[] bundles = configurationService.getArrayProperty("solr-statistics.query.filter.bundles"); String[] bundles = configurationService.getArrayProperty("solr-statistics.query.filter.bundles");
@@ -1035,7 +1054,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
try { try {
// solr.set // solr.set
response = solr.query(solrQuery); response = solr.query(solrQuery);
} catch (SolrServerException e) { } catch (SolrServerException | IOException e) {
log.error("Error searching Solr usage events using query {}", query, e); log.error("Error searching Solr usage events using query {}", query, e);
throw e; throw e;
} }
@@ -1086,7 +1105,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public void shardSolrIndex() throws IOException, SolrServerException { public void shardSolrIndex() throws IOException, SolrServerException {
if (!(solr instanceof HttpSolrServer)) { if (!(solr instanceof HttpSolrClient)) {
return; return;
} }
@@ -1152,14 +1171,14 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
//Start by creating a new core //Start by creating a new core
String coreName = "statistics-" + dcStart.getYearUTC(); String coreName = "statistics-" + dcStart.getYearUTC();
HttpSolrServer statisticsYearServer = createCore((HttpSolrServer) solr, coreName); HttpSolrClient statisticsYearServer = createCore((HttpSolrClient) solr, coreName);
System.out.println("Moving: " + totalRecords + " into core " + coreName); System.out.println("Moving: " + totalRecords + " into core " + coreName);
log.info("Moving: " + totalRecords + " records into core " + coreName); log.info("Moving: " + totalRecords + " records into core " + coreName);
List<File> filesToUpload = new ArrayList<File>(); List<File> filesToUpload = new ArrayList<>();
for (int i = 0; i < totalRecords; i += 10000) { for (int i = 0; i < totalRecords; i += 10000) {
String solrRequestUrl = ((HttpSolrServer) solr).getBaseURL() + "/select"; String solrRequestUrl = ((HttpSolrClient) solr).getBaseURL() + "/select";
solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams); solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);
HttpGet get = new HttpGet(solrRequestUrl); HttpGet get = new HttpGet(solrRequestUrl);
@@ -1210,13 +1229,13 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
FileUtils.deleteDirectory(tempDirectory); FileUtils.deleteDirectory(tempDirectory);
} }
protected HttpSolrServer createCore(HttpSolrServer solr, String coreName) throws IOException, SolrServerException { protected HttpSolrClient createCore(HttpSolrClient solr, String coreName) throws IOException, SolrServerException {
String solrDir = configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator; String solrDir = configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator;
String baseSolrUrl = solr.getBaseURL().replace("statistics", ""); String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
//DS-3458: Test to see if a solr core already exists. If it exists, return that server. Otherwise create a //DS-3458: Test to see if a solr core already exists. If it exists, return that server. Otherwise create a
// new one. // new one.
HttpSolrServer returnServer = new HttpSolrServer(baseSolrUrl + "/" + coreName); HttpSolrClient returnServer = new HttpSolrClient.Builder(baseSolrUrl + "/" + coreName).build();
try { try {
SolrPingResponse ping = returnServer.ping(); SolrPingResponse ping = returnServer.ping();
log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus())); log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus()));
@@ -1234,7 +1253,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
//The config files for a statistics shard reside wihtin the statistics repository //The config files for a statistics shard reside wihtin the statistics repository
create.setInstanceDir("statistics"); create.setInstanceDir("statistics");
create.setDataDir(solrDir + coreName + File.separator + "data"); create.setDataDir(solrDir + coreName + File.separator + "data");
HttpSolrServer solrServer = new HttpSolrServer(baseSolrUrl); HttpSolrClient solrServer = new HttpSolrClient.Builder(baseSolrUrl).build();
create.process(solrServer); create.process(solrServer);
log.info("Created core with name: " + coreName); log.info("Created core with name: " + coreName);
return returnServer; return returnServer;
@@ -1268,7 +1287,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override @Override
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception { public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
if (!(solr instanceof HttpSolrServer)) { if (!(solr instanceof HttpSolrClient)) {
return; return;
} }
@@ -1288,16 +1307,16 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
File tempDirectory = new File( File tempDirectory = new File(
configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator); configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
tempDirectory.mkdirs(); tempDirectory.mkdirs();
List<File> tempCsvFiles = new ArrayList<File>(); List<File> tempCsvFiles = new ArrayList<>();
for (int i = 0; i < totalRecords; i += 10000) { for (int i = 0; i < totalRecords; i += 10000) {
Map<String, String> params = new HashMap<String, String>(); Map<String, String> params = new HashMap<>();
params.put(CommonParams.Q, "*:*"); params.put(CommonParams.Q, "*:*");
params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM); params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
params.put(CommonParams.WT, "csv"); params.put(CommonParams.WT, "csv");
params.put(CommonParams.ROWS, String.valueOf(10000)); params.put(CommonParams.ROWS, String.valueOf(10000));
params.put(CommonParams.START, String.valueOf(i)); params.put(CommonParams.START, String.valueOf(i));
String solrRequestUrl = ((HttpSolrServer) solr).getBaseURL() + "/select"; String solrRequestUrl = ((HttpSolrClient) solr).getBaseURL() + "/select";
solrRequestUrl = generateURL(solrRequestUrl, params); solrRequestUrl = generateURL(solrRequestUrl, params);
HttpGet get = new HttpGet(solrRequestUrl); HttpGet get = new HttpGet(solrRequestUrl);
@@ -1507,7 +1526,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
* This code is synchonized in the event that 2 threads trigger the initialization at the same time. * This code is synchonized in the event that 2 threads trigger the initialization at the same time.
*/ */
protected synchronized void initSolrYearCores() { protected synchronized void initSolrYearCores() {
if (statisticYearCoresInit || !(solr instanceof HttpSolrServer)) { if (statisticYearCoresInit || !(solr instanceof HttpSolrClient)) {
return; return;
} }
try { try {
@@ -1523,17 +1542,20 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
} }
}); });
//Base url should like : http://localhost:{port.number}/solr //Base url should like : http://localhost:{port.number}/solr
String baseSolrUrl = ((HttpSolrServer) solr).getBaseURL().replace("statistics", ""); String baseSolrUrl = ((HttpSolrClient) solr).getBaseURL().replace("statistics", "");
for (File solrCoreFile : solrCoreFiles) { for (File solrCoreFile : solrCoreFiles) {
log.info("Loading core with name: " + solrCoreFile.getName()); log.info("Loading core with name: " + solrCoreFile.getName());
createCore((HttpSolrServer) solr, solrCoreFile.getName()); createCore((HttpSolrClient) solr, solrCoreFile.getName());
//Add it to our cores list so we can query it ! //Add it to our cores list so we can query it !
statisticYearCores statisticYearCores
.add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName()); .add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName());
} }
//Also add the core containing the current year ! //Also add the core containing the current year !
statisticYearCores.add(((HttpSolrServer) solr).getBaseURL().replace("http://", "").replace("https://", "")); statisticYearCores.add(((HttpSolrClient) solr)
.getBaseURL()
.replace("http://", "")
.replace("https://", ""));
} catch (Exception e) { } catch (Exception e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);
} }

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.statistics.content; package org.dspace.statistics.content;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@@ -48,7 +49,7 @@ public class StatisticsBSAdapter {
public StatisticsBSAdapter() { public StatisticsBSAdapter() {
displayItemViews = false; displayItemViews = false;
displayBitstreamViews = false; displayBitstreamViews = false;
filters = new ArrayList<StatisticsFilter>(); filters = new ArrayList<>();
solrLoggerService = StatisticsServiceFactory.getInstance().getSolrLoggerService(); solrLoggerService = StatisticsServiceFactory.getInstance().getSolrLoggerService();
} }
@@ -60,8 +61,10 @@ public class StatisticsBSAdapter {
* @param item the item from which we need our visits * @param item the item from which we need our visits
* @return the number of visits * @return the number of visits
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
* @throws java.io.IOException passed through.
*/ */
public long getNumberOfVisits(int visitType, Item item) throws SolrServerException { public long getNumberOfVisits(int visitType, Item item)
throws SolrServerException, IOException {
switch (visitType) { switch (visitType) {
case ITEM_VISITS: case ITEM_VISITS:
return solrLoggerService return solrLoggerService

View File

@@ -59,7 +59,7 @@ public class StatisticsDataSearches extends StatisticsData {
} }
List<StatisticsFilter> filters = getFilters(); List<StatisticsFilter> filters = getFilters();
List<String> defaultFilters = new ArrayList<String>(); List<String> defaultFilters = new ArrayList<>();
for (StatisticsFilter statisticsFilter : filters) { for (StatisticsFilter statisticsFilter : filters) {
defaultFilters.add(statisticsFilter.toQuery()); defaultFilters.add(statisticsFilter.toQuery());
} }
@@ -221,7 +221,8 @@ public class StatisticsDataSearches extends StatisticsData {
return query; return query;
} }
protected ObjectCount getTotalPageViews(String query, String defaultFilterQuery) throws SolrServerException { protected ObjectCount getTotalPageViews(String query, String defaultFilterQuery)
throws SolrServerException, IOException {
StringBuilder fqBuffer; StringBuilder fqBuffer;
fqBuffer = new StringBuilder(defaultFilterQuery); fqBuffer = new StringBuilder(defaultFilterQuery);
if (0 < fqBuffer.length()) { if (0 < fqBuffer.length()) {

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.statistics.content; package org.dspace.statistics.content;
import java.io.IOException;
import java.io.UnsupportedEncodingException; import java.io.UnsupportedEncodingException;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
@@ -117,7 +118,7 @@ public class StatisticsDataVisits extends StatisticsData {
@Override @Override
public Dataset createDataset(Context context) throws SQLException, public Dataset createDataset(Context context) throws SQLException,
SolrServerException, ParseException { SolrServerException, ParseException, IOException {
// Check if we already have one. // Check if we already have one.
// If we do then give it back. // If we do then give it back.
if (getDataset() != null) { if (getDataset() != null) {
@@ -127,7 +128,7 @@ public class StatisticsDataVisits extends StatisticsData {
/////////////////////////// ///////////////////////////
// 1. DETERMINE OUR AXIS // // 1. DETERMINE OUR AXIS //
/////////////////////////// ///////////////////////////
ArrayList<DatasetQuery> datasetQueries = new ArrayList<DatasetQuery>(); ArrayList<DatasetQuery> datasetQueries = new ArrayList<>();
for (int i = 0; i < getDatasetGenerators().size(); i++) { for (int i = 0; i < getDatasetGenerators().size(); i++) {
DatasetGenerator dataSet = getDatasetGenerators().get(i); DatasetGenerator dataSet = getDatasetGenerators().get(i);
processAxis(context, dataSet, datasetQueries); processAxis(context, dataSet, datasetQueries);
@@ -703,7 +704,8 @@ public class StatisticsDataVisits extends StatisticsData {
protected ObjectCount[] queryFacetField(DatasetQuery dataset, String query, protected ObjectCount[] queryFacetField(DatasetQuery dataset, String query,
String filterQuery) throws SolrServerException { String filterQuery)
throws SolrServerException, IOException {
String facetType = dataset.getFacetField() == null ? "id" : dataset String facetType = dataset.getFacetField() == null ? "id" : dataset
.getFacetField(); .getFacetField();
return solrLoggerService.queryFacetField(query, filterQuery, facetType, return solrLoggerService.queryFacetField(query, filterQuery, facetType,

View File

@@ -38,7 +38,7 @@ import org.dspace.statistics.SolrLoggerServiceImpl;
import org.dspace.statistics.content.filter.StatisticsFilter; import org.dspace.statistics.content.filter.StatisticsFilter;
/** /**
* A workflow data implementation that will query the statistics backend for workflow information * A workflow data implementation that will query the statistics backend for workflow information.
* *
* @author Kevin Van de Velde (kevin at atmire dot com) * @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com) * @author Ben Bosman (ben at atmire dot com)
@@ -166,19 +166,21 @@ public class StatisticsDataWorkflow extends StatisticsData {
* @param typeGenerator the type generator * @param typeGenerator the type generator
* @return counts for each facet by name. * @return counts for each facet by name.
* @throws org.apache.solr.client.solrj.SolrServerException passed through. * @throws org.apache.solr.client.solrj.SolrServerException passed through.
* @throws java.io.IOException passed through.
*/ */
protected Map<String, Long> getTotalFacetCounts(DatasetTypeGenerator typeGenerator) throws SolrServerException { protected Map<String, Long> getTotalFacetCounts(DatasetTypeGenerator typeGenerator)
throws SolrServerException, IOException {
ObjectCount[] objectCounts = solrLoggerService ObjectCount[] objectCounts = solrLoggerService
.queryFacetField(getQuery(), null, typeGenerator.getType(), -1, false, null); .queryFacetField(getQuery(), null, typeGenerator.getType(), -1, false, null);
Map<String, Long> result = new HashMap<String, Long>(); Map<String, Long> result = new HashMap<>();
for (ObjectCount objectCount : objectCounts) { for (ObjectCount objectCount : objectCounts) {
result.put(objectCount.getValue(), objectCount.getCount()); result.put(objectCount.getValue(), objectCount.getCount());
} }
return result; return result;
} }
protected Date getOldestWorkflowItemDate()
protected Date getOldestWorkflowItemDate() throws SolrServerException { throws SolrServerException, IOException {
ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
String workflowStartDate = configurationService.getProperty("usage-statistics.workflow-start-date"); String workflowStartDate = configurationService.getProperty("usage-statistics.workflow-start-date");
if (workflowStartDate == null) { if (workflowStartDate == null) {

View File

@@ -88,11 +88,12 @@ public interface SolrLoggerService {
* @throws IOException A general class of exceptions produced by failed or interrupted I/O operations. * @throws IOException A general class of exceptions produced by failed or interrupted I/O operations.
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
*/ */
public void removeIndex(String query) throws IOException, public void removeIndex(String query)
SolrServerException; throws IOException, SolrServerException;
public Map<String, List<String>> queryField(String query, public Map<String, List<String>> queryField(String query,
List oldFieldVals, String field); List oldFieldVals, String field)
throws IOException;
public void markRobotsByIP(); public void markRobotsByIP();
@@ -115,7 +116,8 @@ public interface SolrLoggerService {
List<String> fieldNames, List<List<Object>> fieldValuesList) List<String> fieldNames, List<List<Object>> fieldValuesList)
throws SolrServerException, IOException; throws SolrServerException, IOException;
public void query(String query, int max) throws SolrServerException; public void query(String query, int max)
throws SolrServerException, IOException;
/** /**
* Query used to get values grouped by the given facet field. * Query used to get values grouped by the given facet field.
@@ -130,10 +132,12 @@ public interface SolrLoggerService {
* @param facetQueries list of facet queries * @param facetQueries list of facet queries
* @return an array containing our results * @return an array containing our results
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
* @throws java.io.IOException passed through.
*/ */
public ObjectCount[] queryFacetField(String query, public ObjectCount[] queryFacetField(String query,
String filterQuery, String facetField, int max, boolean showTotal, String filterQuery, String facetField, int max, boolean showTotal,
List<String> facetQueries) throws SolrServerException; List<String> facetQueries)
throws SolrServerException, IOException;
/** /**
* Query used to get values grouped by the date. * Query used to get values grouped by the date.
@@ -152,22 +156,24 @@ public interface SolrLoggerService {
* @param context The relevant DSpace Context. * @param context The relevant DSpace Context.
* @return and array containing our results * @return and array containing our results
* @throws SolrServerException Exception from the Solr server to the solrj Java client. * @throws SolrServerException Exception from the Solr server to the solrj Java client.
* @throws java.io.IOException passed through.
*/ */
public ObjectCount[] queryFacetDate(String query, public ObjectCount[] queryFacetDate(String query,
String filterQuery, int max, String dateType, String dateStart, String filterQuery, int max, String dateType, String dateStart,
String dateEnd, boolean showTotal, Context context) throws SolrServerException; String dateEnd, boolean showTotal, Context context)
throws SolrServerException, IOException;
public Map<String, Integer> queryFacetQuery(String query, public Map<String, Integer> queryFacetQuery(String query,
String filterQuery, List<String> facetQueries) String filterQuery, List<String> facetQueries)
throws SolrServerException; throws SolrServerException, IOException;
public ObjectCount queryTotal(String query, String filterQuery) public ObjectCount queryTotal(String query, String filterQuery)
throws SolrServerException; throws SolrServerException, IOException;
public QueryResponse query(String query, String filterQuery, public QueryResponse query(String query, String filterQuery,
String facetField, int rows, int max, String dateType, String dateStart, String facetField, int rows, int max, String dateType, String dateStart,
String dateEnd, List<String> facetQueries, String sort, boolean ascending) String dateEnd, List<String> facetQueries, String sort, boolean ascending)
throws SolrServerException; throws SolrServerException, IOException;
/** /**
* Returns in a filterQuery string all the ip addresses that should be ignored * Returns in a filterQuery string all the ip addresses that should be ignored

View File

@@ -36,7 +36,7 @@ import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -80,7 +80,7 @@ public class StatisticsImporter {
/** /**
* Solr server connection * Solr server connection
*/ */
private static HttpSolrServer solr; private static HttpSolrClient solr;
/** /**
* GEOIP lookup service * GEOIP lookup service
@@ -468,7 +468,7 @@ public class StatisticsImporter {
if (verbose) { if (verbose) {
System.out.println("Writing to solr server at: " + sserver); System.out.println("Writing to solr server at: " + sserver);
} }
solr = new HttpSolrServer(sserver); solr = new HttpSolrClient.Builder(sserver).build();
String dbPath = ConfigurationManager.getProperty("usage-statistics", "dbfile"); String dbPath = ConfigurationManager.getProperty("usage-statistics", "dbfile");
try { try {

View File

@@ -10,6 +10,7 @@ package org.dspace.storage.rdbms;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.sql.Connection; import java.sql.Connection;
import java.sql.DatabaseMetaData; import java.sql.DatabaseMetaData;
@@ -23,6 +24,7 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import javax.sql.DataSource; import javax.sql.DataSource;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -88,7 +90,8 @@ public class DatabaseUtils {
// Usage checks // Usage checks
if (argv.length < 1) { if (argv.length < 1) {
System.out.println("\nDatabase action argument is missing."); System.out.println("\nDatabase action argument is missing.");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate' or 'clean'"); System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', 'validate', " +
"'update-sequences' or 'clean'");
System.out.println("\nOr, type 'database help' for more information.\n"); System.out.println("\nOr, type 'database help' for more information.\n");
System.exit(1); System.exit(1);
} }
@@ -328,23 +331,48 @@ public class DatabaseUtils {
e.printStackTrace(); e.printStackTrace();
System.exit(1); System.exit(1);
} }
} else if (argv[0].equalsIgnoreCase("update-sequences")) {
try (Connection connection = dataSource.getConnection()) {
String dbType = getDbType(connection);
String sqlfile = "org/dspace/storage/rdbms/sqlmigration/" + dbType +
"/update-sequences.sql";
InputStream sqlstream = DatabaseUtils.class.getClassLoader().getResourceAsStream(sqlfile);
if (sqlstream != null) {
String s = IOUtils.toString(sqlstream, "UTF-8");
if (!s.isEmpty()) {
System.out.println("Running " + sqlfile);
connection.createStatement().execute(s);
System.out.println("update-sequences complete");
} else {
System.err.println(sqlfile + " contains no SQL to execute");
}
} else {
System.err.println(sqlfile + " not found");
}
}
} else { } else {
System.out.println("\nUsage: database [action]"); System.out.println("\nUsage: database [action]");
System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair' or 'clean'"); System.out.println("Valid actions: 'test', 'info', 'migrate', 'repair', " +
"'update-sequences' or 'clean'");
System.out.println( System.out.println(
" - test = Performs a test connection to database to validate connection settings"); " - test = Performs a test connection to database to " +
"validate connection settings");
System.out.println( System.out.println(
" - info / status = Describe basic info/status about database, including validating the " + " - info / status = Describe basic info/status about database, including validating the " +
"compatibility of this database"); "compatibility of this database");
System.out.println(" - migrate = Migrate the database to the latest version");
System.out.println( System.out.println(
" - repair = Attempt to repair any previously failed database migrations or checksum " + " - migrate = Migrate the database to the latest version");
"mismatches (via Flyway repair)"); System.out.println(
" - repair = Attempt to repair any previously failed database " +
"migrations or checksum mismatches (via Flyway repair)");
System.out.println( System.out.println(
" - validate = Validate current database's migration status (via Flyway validate), " + " - validate = Validate current database's migration status (via Flyway validate), " +
"validating all migration checksums."); "validating all migration checksums.");
System.out.println( System.out.println(
" - clean = DESTROY all data and tables in database (WARNING there is no going back!). " + " - update-sequences = Update database sequences after running AIP ingest.");
System.out.println(
" - clean = DESTROY all data and tables in database " +
"(WARNING there is no going back!). " +
"Requires 'db.cleanDisabled=false' setting in config."); "Requires 'db.cleanDisabled=false' setting in config.");
System.out.println(""); System.out.println("");
System.exit(0); System.exit(0);

View File

@@ -31,7 +31,7 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest; import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.request.CoreAdminRequest;
@@ -275,7 +275,7 @@ public class SolrImportExport {
} }
try { try {
HttpSolrServer adminSolr = new HttpSolrServer(baseSolrUrl); HttpSolrClient adminSolr = new HttpSolrClient.Builder(baseSolrUrl).build();
// try to find out size of core and compare with free space in export directory // try to find out size of core and compare with free space in export directory
CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr); CoreAdminResponse status = CoreAdminRequest.getStatus(indexName, adminSolr);
@@ -342,7 +342,7 @@ public class SolrImportExport {
} }
// commit changes // commit changes
HttpSolrServer origSolr = new HttpSolrServer(origSolrUrl); HttpSolrClient origSolr = new HttpSolrClient.Builder(origSolrUrl).build();
origSolr.commit(); origSolr.commit();
// swap back (statistics now going to actual core name in actual data dir) // swap back (statistics now going to actual core name in actual data dir)
@@ -424,7 +424,7 @@ public class SolrImportExport {
+ indexName); + indexName);
} }
HttpSolrServer solr = new HttpSolrServer(solrUrl); HttpSolrClient solr = new HttpSolrClient.Builder(solrUrl).build();
// must get multivalue fields before clearing // must get multivalue fields before clearing
List<String> multivaluedFields = getMultiValuedFields(solr); List<String> multivaluedFields = getMultiValuedFields(solr);
@@ -471,7 +471,7 @@ public class SolrImportExport {
* @param solr the solr server to query. * @param solr the solr server to query.
* @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any. * @return A list containing all multi-valued fields, or an empty list if none are found / there aren't any.
*/ */
private static List<String> getMultiValuedFields(HttpSolrServer solr) { private static List<String> getMultiValuedFields(HttpSolrClient solr) {
List<String> result = new ArrayList<>(); List<String> result = new ArrayList<>();
try { try {
LukeRequest request = new LukeRequest(); LukeRequest request = new LukeRequest();
@@ -497,7 +497,7 @@ public class SolrImportExport {
* @throws SolrServerException if there is a problem in communicating with Solr. * @throws SolrServerException if there is a problem in communicating with Solr.
*/ */
public static void clearIndex(String solrUrl) throws IOException, SolrServerException { public static void clearIndex(String solrUrl) throws IOException, SolrServerException {
HttpSolrServer solr = new HttpSolrServer(solrUrl); HttpSolrClient solr = new HttpSolrClient.Builder(solrUrl).build();
solr.deleteByQuery("*:*"); solr.deleteByQuery("*:*");
solr.commit(); solr.commit();
solr.optimize(); solr.optimize();
@@ -536,7 +536,7 @@ public class SolrImportExport {
+ indexName); + indexName);
} }
HttpSolrServer solr = new HttpSolrServer(solrUrl); HttpSolrClient solr = new HttpSolrClient.Builder(solrUrl).build();
SolrQuery query = new SolrQuery("*:*"); SolrQuery query = new SolrQuery("*:*");
if (StringUtils.isNotBlank(fromWhen)) { if (StringUtils.isNotBlank(fromWhen)) {

View File

@@ -24,10 +24,9 @@ import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
@@ -92,7 +91,7 @@ public class SolrUpgradePre6xStatistics {
private int numProcessed = 0; private int numProcessed = 0;
private long totalCache = 0; private long totalCache = 0;
private long numUncache = 0; private long numUncache = 0;
private List<SolrInputDocument> docs = new ArrayList<SolrInputDocument>(); private final List<SolrInputDocument> docs = new ArrayList<>();
private Context context; private Context context;
//Enum to identify the named SOLR statistics fields to update //Enum to identify the named SOLR statistics fields to update
@@ -122,7 +121,7 @@ public class SolrUpgradePre6xStatistics {
// This code will operate on one shard at a time, therefore the SOLR web service will be accessed directly rather // This code will operate on one shard at a time, therefore the SOLR web service will be accessed directly rather
// than make use of the DSpace Solr Logger which only writes to the current shard // than make use of the DSpace Solr Logger which only writes to the current shard
private HttpSolrServer server; private final HttpSolrClient server;
//Allows for smart use of hibernate cache //Allows for smart use of hibernate cache
private Item lastItem = null; private Item lastItem = null;
@@ -137,6 +136,7 @@ public class SolrUpgradePre6xStatistics {
* Construct the utility class from the command line options * Construct the utility class from the command line options
* @param indexName name of the statistics shard to update * @param indexName name of the statistics shard to update
* @param numRec maximum number of records to process * @param numRec maximum number of records to process
* @param batchSize batch this many documents before updating.
* @throws IOException * @throws IOException
* @throws SolrServerException * @throws SolrServerException
*/ */
@@ -145,8 +145,8 @@ public class SolrUpgradePre6xStatistics {
String serverPath = configurationService.getProperty("solr-statistics.server"); String serverPath = configurationService.getProperty("solr-statistics.server");
serverPath = serverPath.replaceAll("statistics$", indexName); serverPath = serverPath.replaceAll("statistics$", indexName);
System.out.println("Connecting to " + serverPath); System.out.println("Connecting to " + serverPath);
server = new HttpSolrServer(serverPath); server = new HttpSolrClient.Builder(serverPath)
server.setMaxTotalConnections(1); .build();
this.numRec = numRec; this.numRec = numRec;
this.batchSize = batchSize; this.batchSize = batchSize;
refreshContext(); refreshContext();
@@ -374,7 +374,7 @@ public class SolrUpgradePre6xStatistics {
/* /*
* Report on the existence of legacy id records within a shard * Report on the existence of legacy id records within a shard
*/ */
private void runReport() throws SolrServerException { private void runReport() throws SolrServerException, IOException {
System.out.println(); System.out.println();
System.out.println("================================================================="); System.out.println("=================================================================");
System.out.println("\t*** Statistics Records with Legacy Id ***\n"); System.out.println("\t*** Statistics Records with Legacy Id ***\n");
@@ -388,10 +388,9 @@ public class SolrUpgradePre6xStatistics {
/* /*
* Report on the existence of specific legacy id records within a shard * Report on the existence of specific legacy id records within a shard
*/ */
private long runReportQuery() throws SolrServerException { private long runReportQuery() throws SolrServerException, IOException {
StringBuilder sb = new StringBuilder(MIGQUERY);
SolrQuery sQ = new SolrQuery(); SolrQuery sQ = new SolrQuery();
sQ.setQuery(sb.toString()); sQ.setQuery(MIGQUERY);
sQ.setFacet(true); sQ.setFacet(true);
sQ.addFacetField("type"); sQ.addFacetField("type");
sQ.addFacetField("scopeType"); sQ.addFacetField("scopeType");
@@ -495,7 +494,10 @@ public class SolrUpgradePre6xStatistics {
for (int i = 0; i < sdl.size() && (numProcessed < numRec); i++) { for (int i = 0; i < sdl.size() && (numProcessed < numRec); i++) {
SolrDocument sd = sdl.get(i); SolrDocument sd = sdl.get(i);
SolrInputDocument input = ClientUtils.toSolrInputDocument(sd); SolrInputDocument input = new SolrInputDocument(); //ClientUtils.toSolrInputDocument(sd);
for (String name : sd.getFieldNames()) { // https://stackoverflow.com/a/38536843/2916377
input.addField(name, sd.getFieldValue(name));
}
input.remove("_version_"); input.remove("_version_");
for (FIELD col : FIELD.values()) { for (FIELD col : FIELD.values()) {
mapField(input, col); mapField(input, col);

View File

@@ -21,14 +21,12 @@ import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator; import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.factory.WorkflowServiceFactory;
/** /**
* Class representing an item going through the workflow process in DSpace * Class representing an item going through the workflow process in DSpace
@@ -188,20 +186,6 @@ public class BasicWorkflowItem implements WorkflowItem {
this.publishedBefore = b; this.publishedBefore = b;
} }
@Override
public void update() throws SQLException, AuthorizeException {
Context context = null;
try {
context = new Context();
WorkflowServiceFactory.getInstance().getWorkflowItemService().update(context, this);
} finally {
if (context != null && context.isValid()) {
context.abort();
}
}
}
@Override @Override
public int getType() { public int getType() {
return Constants.WORKFLOWITEM; return Constants.WORKFLOWITEM;

View File

@@ -315,6 +315,11 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
Step currentStep = currentActionConfig.getStep(); Step currentStep = currentActionConfig.getStep();
if (currentActionConfig.getProcessingAction().isAuthorized(c, request, wi)) { if (currentActionConfig.getProcessingAction().isAuthorized(c, request, wi)) {
ActionResult outcome = currentActionConfig.getProcessingAction().execute(c, wi, currentStep, request); ActionResult outcome = currentActionConfig.getProcessingAction().execute(c, wi, currentStep, request);
// the cancel action is the default when the request is not understood or a "back to mydspace" was
// pressed in the old UI
if (outcome.getType() == ActionResult.TYPE.TYPE_CANCEL) {
throw new WorkflowException("Unprocessable request for the action " + currentStep.getId());
}
c.addEvent(new Event(Event.MODIFY, Constants.ITEM, wi.getItem().getID(), null, c.addEvent(new Event(Event.MODIFY, Constants.ITEM, wi.getItem().getID(), null,
itemService.getIdentifiers(c, wi.getItem()))); itemService.getIdentifiers(c, wi.getItem())));
return processOutcome(c, user, workflow, currentStep, currentActionConfig, outcome, wi, false); return processOutcome(c, user, workflow, currentStep, currentActionConfig, outcome, wi, false);
@@ -668,6 +673,8 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
removeUserItemPolicies(c, wi.getItem(), task.getOwner()); removeUserItemPolicies(c, wi.getItem(), task.getOwner());
claimedTaskService.delete(c, task); claimedTaskService.delete(c, task);
} }
c.addEvent(new Event(Event.MODIFY, Constants.ITEM, wi.getItem().getID(), null,
itemService.getIdentifiers(c, wi.getItem())));
} }
/* /*
@@ -1058,17 +1065,10 @@ public class XmlWorkflowServiceImpl implements XmlWorkflowService {
} }
protected void revokeReviewerPolicies(Context context, Item item) throws SQLException, AuthorizeException { protected void revokeReviewerPolicies(Context context, Item item) throws SQLException, AuthorizeException {
// get bundle "ORIGINAL" List<Bundle> bundles = item.getBundles();
Bundle originalBundle;
try {
originalBundle = itemService.getBundles(item, "ORIGINAL").get(0);
} catch (IndexOutOfBoundsException ex) {
originalBundle = null;
}
for (Bundle originalBundle : bundles) {
// remove bitstream and bundle level policies // remove bitstream and bundle level policies
if (originalBundle != null) {
// We added policies for Bitstreams of the bundle "original" only
for (Bitstream bitstream : originalBundle.getBitstreams()) { for (Bitstream bitstream : originalBundle.getBitstreams()) {
authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW); authorizeService.removeAllPoliciesByDSOAndType(context, bitstream, ResourcePolicy.TYPE_WORKFLOW);
} }

View File

@@ -28,6 +28,8 @@ import org.dspace.xmlworkflow.state.actions.WorkflowActionConfig;
*/ */
public interface XmlWorkflowFactory { public interface XmlWorkflowFactory {
public final String LEGACY_WORKFLOW_NAME = "default";
public Workflow getWorkflow(Collection collection) throws IOException, WorkflowConfigurationException, SQLException; public Workflow getWorkflow(Collection collection) throws IOException, WorkflowConfigurationException, SQLException;
public Step createStep(Workflow workflow, String stepID) throws WorkflowConfigurationException, IOException; public Step createStep(Workflow workflow, String stepID) throws WorkflowConfigurationException, IOException;

View File

@@ -7,9 +7,6 @@
*/ */
package org.dspace.xmlworkflow.storedcomponents; package org.dspace.xmlworkflow.storedcomponents;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.FetchType; import javax.persistence.FetchType;
@@ -20,7 +17,6 @@ import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne; import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator; import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import javax.persistence.Transient;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.core.Constants; import org.dspace.core.Constants;
@@ -40,9 +36,6 @@ import org.dspace.eperson.EPerson;
@Table(name = "cwf_claimtask") @Table(name = "cwf_claimtask")
public class ClaimedTask implements ReloadableEntity<Integer>, IndexableObject<Integer> { public class ClaimedTask implements ReloadableEntity<Integer>, IndexableObject<Integer> {
@Transient
public transient Map<String, Object> extraInfo = new HashMap<String, Object>();
@Id @Id
@Column(name = "claimtask_id") @Column(name = "claimtask_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cwf_claimtask_seq") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cwf_claimtask_seq")
@@ -126,7 +119,7 @@ public class ClaimedTask implements ReloadableEntity<Integer>, IndexableObject<I
@Override @Override
public int getType() { public int getType() {
return Constants.WORKFLOW_CLAIMED; return Constants.CLAIMEDTASK;
} }
} }

View File

@@ -39,7 +39,7 @@ public class ClaimedTaskServiceImpl implements ClaimedTaskService {
@Override @Override
public int getSupportsTypeConstant() { public int getSupportsTypeConstant() {
return Constants.WORKFLOW_CLAIMED; return Constants.CLAIMEDTASK;
} }
@Override @Override

View File

@@ -7,9 +7,6 @@
*/ */
package org.dspace.xmlworkflow.storedcomponents; package org.dspace.xmlworkflow.storedcomponents;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.Column; import javax.persistence.Column;
import javax.persistence.Entity; import javax.persistence.Entity;
import javax.persistence.FetchType; import javax.persistence.FetchType;
@@ -21,7 +18,6 @@ import javax.persistence.ManyToOne;
import javax.persistence.OneToOne; import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator; import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import javax.persistence.Transient;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.core.Constants; import org.dspace.core.Constants;
@@ -42,9 +38,6 @@ import org.dspace.eperson.Group;
@Table(name = "cwf_pooltask") @Table(name = "cwf_pooltask")
public class PoolTask implements ReloadableEntity<Integer>, IndexableObject<Integer> { public class PoolTask implements ReloadableEntity<Integer>, IndexableObject<Integer> {
@Transient
public transient Map<String, Object> extraInfo = new HashMap<String, Object>();
@Id @Id
@Column(name = "pooltask_id") @Column(name = "pooltask_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cwf_pooltask_seq") @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "cwf_pooltask_seq")
@@ -141,7 +134,7 @@ public class PoolTask implements ReloadableEntity<Integer>, IndexableObject<Inte
@Override @Override
public int getType() { public int getType() {
return Constants.WORKFLOW_POOL; return Constants.POOLTASK;
} }
} }

View File

@@ -50,7 +50,7 @@ public class PoolTaskServiceImpl implements PoolTaskService {
@Override @Override
public int getSupportsTypeConstant() { public int getSupportsTypeConstant() {
return Constants.WORKFLOW_POOL; return Constants.POOLTASK;
} }
@Override @Override

View File

@@ -21,7 +21,6 @@ import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator; import javax.persistence.SequenceGenerator;
import javax.persistence.Table; import javax.persistence.Table;
import org.dspace.authorize.AuthorizeException;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Item; import org.dspace.content.Item;
@@ -30,7 +29,6 @@ import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity; import org.dspace.core.ReloadableEntity;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.workflow.WorkflowItem; import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.factory.WorkflowServiceFactory;
/** /**
* Class representing an item going through the workflow process in DSpace * Class representing an item going through the workflow process in DSpace
@@ -139,20 +137,6 @@ public class XmlWorkflowItem implements WorkflowItem, ReloadableEntity<Integer>,
this.publishedBefore = b; this.publishedBefore = b;
} }
@Override
public void update() throws SQLException, AuthorizeException {
Context context = null;
try {
context = new Context();
WorkflowServiceFactory.getInstance().getWorkflowItemService().update(context, this);
} finally {
if (context != null && context.isValid()) {
context.abort();
}
}
}
@Override @Override
public int getState() { public int getState() {
// FIXME not used by the xml workflow, should be removed when the basic workflow is removed and the interfaces // FIXME not used by the xml workflow, should be removed when the basic workflow is removed and the interfaces

View File

@@ -0,0 +1,79 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
-- SQL code to update the ID (primary key) generating sequences, if some
-- import operation has set explicit IDs.
--
-- Sequences are used to generate IDs for new rows in the database. If a
-- bulk import operation, such as an SQL dump, specifies primary keys for
-- imported data explicitly, the sequences are out of sync and need updating.
-- This SQL code does just that.
--
-- This should rarely be needed; any bulk import should be performed using the
-- org.dspace.content API which is safe to use concurrently and in multiple
-- JVMs. The SQL code below will typically only be required after a direct
-- SQL data dump from a backup or somesuch.
-- The 'updateseq' procedure was derived from incseq.sql found at:
-- http://www.akadia.com/services/scripts/incseq.sql
DECLARE
PROCEDURE updateseq ( seq IN VARCHAR,
tbl IN VARCHAR,
attr IN VARCHAR,
cond IN VARCHAR DEFAULT '' ) IS
curr NUMBER := 0;
BEGIN
EXECUTE IMMEDIATE 'SELECT max(' || attr
|| ') FROM ' || tbl
|| ' ' || cond
INTO curr;
curr := curr + 1;
EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq;
EXECUTE IMMEDIATE 'CREATE SEQUENCE '
|| seq
|| ' START WITH '
|| NVL(curr, 1);
END updateseq;
BEGIN
updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry',
'bitstream_format_id');
updateseq('fileextension_seq', 'fileextension', 'file_extension_id');
updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id');
updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id');
updateseq('workflowitem_seq', 'workflowitem', 'workflow_id');
updateseq('tasklistitem_seq', 'tasklistitem', 'tasklist_id');
updateseq('registrationdata_seq', 'registrationdata',
'registrationdata_id');
updateseq('subscription_seq', 'subscription', 'subscription_id');
updateseq('metadatafieldregistry_seq', 'metadatafieldregistry',
'metadata_field_id');
updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id');
updateseq('metadataschemaregistry_seq', 'metadataschemaregistry',
'metadata_schema_id');
updateseq('harvested_collection_seq', 'harvested_collection', 'id');
updateseq('harvested_item_seq', 'harvested_item', 'id');
updateseq('webapp_seq', 'webapp', 'webapp_id');
updateseq('requestitem_seq', 'requestitem', 'requestitem_id');
updateseq('handle_id_seq', 'handle', 'handle_id');
-- Handle Sequence is a special case. Since Handles minted by DSpace
-- use the 'handle_seq', we need to ensure the next assigned handle
-- will *always* be unique. So, 'handle_seq' always needs to be set
-- to the value of the *largest* handle suffix. That way when the
-- next handle is assigned, it will use the next largest number. This
-- query does the following:
-- For all 'handle' values which have a number in their suffix
-- (after '/'), find the maximum suffix value, convert it to a
-- number, and set the 'handle_seq' to start at the next value (see
-- updateseq above for more).
updateseq('handle_seq', 'handle',
q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}',
q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}');
END;

View File

@@ -17,4 +17,16 @@ not realize you manually ran one or more scripts.
Please see the Flyway Documentation for more information: http://flywaydb.org/ Please see the Flyway Documentation for more information: http://flywaydb.org/
## Using the update-sequences.sql script
The `update-sequences.sql` script in this directory may still be used to update
your internal database counts if you feel they have gotten out of "sync". This
may sometimes occur after large restores of content (e.g. when using the DSpace
[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore)
feature).
This `update-sequences.sql` script can be executed by running
"dspace database update-sequences". It will not harm your
database (or its contents) in any way. It just ensures all database counts (i.e.
sequences) are properly set to the next available value.

View File

@@ -1,35 +1,10 @@
-- --
-- update-sequences.sql -- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
-- --
-- Copyright (c) 2002-2016, The DSpace Foundation. All rights reserved. -- http://www.dspace.org/license/
-- --
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are
-- met:
--
-- - Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- - Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- Neither the name of the DSpace Foundation nor the names of its
-- contributors may be used to endorse or promote products derived from
-- this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-- DAMAGE.
-- SQL code to update the ID (primary key) generating sequences, if some -- SQL code to update the ID (primary key) generating sequences, if some
-- import operation has set explicit IDs. -- import operation has set explicit IDs.

View File

@@ -0,0 +1,2 @@
<?xml version='1.0'?>
<solr/>

View File

@@ -58,7 +58,7 @@ public class CuratorTest
// Get and configure a Curator. // Get and configure a Curator.
Curator instance = new Curator(); Curator instance = new Curator();
instance.setReporter("-"); // Send any report to standard out. FIXME when DS-3989 is merged instance.setReporter(System.out); // Send any report to standard out.
instance.addTask(TASK_NAME); instance.addTask(TASK_NAME);
// Configure the run. // Configure the run.

View File

@@ -0,0 +1,202 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.curate;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
import org.dspace.AbstractUnitTest;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Site;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.services.ConfigurationService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Drive the Curator and check results.
*
* @author mhwood
*/
public class ITCurator
extends AbstractUnitTest {
Logger LOG = LoggerFactory.getLogger(ITCurator.class);
public ITCurator() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* The report should contain contributions from all tasks and all curated objects.
*
* @throws SQLException passed through.
* @throws IOException passed through.
* @throws AuthorizeException passed through.
*/
@Test
public void testCurate_Reporting()
throws SQLException, IOException, AuthorizeException {
// Configure for testing.
ConfigurationService cfg = kernelImpl.getConfigurationService();
cfg.setProperty("plugin.named.org.dspace.curate.CurationTask",
Task1.class.getName() + " = task1");
cfg.addPropertyValue("plugin.named.org.dspace.curate.CurationTask",
Task2.class.getName() + " = task2");
// Create some structure.
context.turnOffAuthorisationSystem();
Site site = ContentServiceFactory.getInstance()
.getSiteService()
.findSite(context);
Community community = ContentServiceFactory.getInstance()
.getCommunityService()
.create(null, context);
// Run some tasks.
ListReporter reporter = new ListReporter();
Curator curator = new Curator();
curator.setReporter(reporter);
curator.addTask("task1");
curator.addTask("task2");
curator.curate(context, site);
// Validate the results.
List<String> report = reporter.getReport();
for (String aReport : report) {
LOG.info("Report: {}", aReport);
}
Pattern pattern;
pattern = Pattern.compile(String.format("task1.*%s", site.getHandle()));
Assert.assertTrue("A report should mention 'task1' and site's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task1.*%s", community.getHandle()));
Assert.assertTrue("A report should mention 'task1' and the community's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task2.*%s", site.getHandle()));
Assert.assertTrue("A report should mention 'task2' and the Site's handle",
reportMatcher(report, pattern));
pattern = Pattern.compile(String.format("task2.*%s", community.getHandle()));
Assert.assertTrue("A report should mention 'task2' and the community's handle",
reportMatcher(report, pattern));
}
/**
* Match a collection of strings against a regular expression.\
*
* @param reports strings to be searched.
* @param pattern expression to be matched.
* @return true if at least one string matches the expression.
*/
private boolean reportMatcher(List<String> reports, Pattern pattern) {
for (String aReport : reports) {
if (pattern.matcher(aReport).find()) {
return true;
}
}
return false;
}
/**
* Dummy curation task for testing. Reports how it was invoked.
*
* @author mhwood
*/
public static class Task1 extends AbstractCurationTask {
public Task1() {
}
@Override
public int perform(DSpaceObject dso)
throws IOException {
curator.report(String.format(
"Task1 received 'perform' on taskId '%s' for object '%s'%n",
taskId, dso.getHandle()));
return Curator.CURATE_SUCCESS;
}
}
/**
* Dummy curation task for testing. Reports how it was invoked.
*
* @author mhwood
*/
public static class Task2 extends AbstractCurationTask {
public Task2() {
}
@Override
public int perform(DSpaceObject dso) throws IOException {
curator.report(String.format(
"Task2 received 'perform' on taskId '%s' for object '%s'%n",
taskId, dso.getHandle()));
return Curator.CURATE_SUCCESS;
}
}
/**
* Absorb report strings into a sequential collection.
*/
class ListReporter
implements Appendable {
private final List<String> report = new ArrayList<>();
/**
* Get the content of the report accumulator.
* @return accumulated reports.
*/
List<String> getReport() {
return report;
}
@Override
public Appendable append(CharSequence cs)
throws IOException {
report.add(cs.toString());
return this;
}
@Override
public Appendable append(CharSequence cs, int i, int i1)
throws IOException {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public Appendable append(char c)
throws IOException {
throw new UnsupportedOperationException("Not supported yet.");
}
}
}

View File

@@ -188,7 +188,7 @@
<dependency> <dependency>
<groupId>org.apache.solr</groupId> <groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId> <artifactId>solr-solrj</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>org.codehaus.woodstox</groupId> <groupId>org.codehaus.woodstox</groupId>

View File

@@ -14,7 +14,6 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.net.ConnectException; import java.net.ConnectException;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@@ -27,7 +26,6 @@ import java.util.UUID;
import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamException;
import com.lyncode.xoai.dataprovider.exceptions.ConfigurationException; import com.lyncode.xoai.dataprovider.exceptions.ConfigurationException;
import com.lyncode.xoai.dataprovider.exceptions.MetadataBindException;
import com.lyncode.xoai.dataprovider.exceptions.WritingXmlException; import com.lyncode.xoai.dataprovider.exceptions.WritingXmlException;
import com.lyncode.xoai.dataprovider.xml.XmlOutputContext; import com.lyncode.xoai.dataprovider.xml.XmlOutputContext;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
@@ -36,9 +34,9 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER; import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
@@ -148,7 +146,7 @@ public class XOAI {
} else { } else {
SolrQuery solrParams = new SolrQuery("*:*") SolrQuery solrParams = new SolrQuery("*:*")
.addField("item.lastmodified") .addField("item.lastmodified")
.addSortField("item.lastmodified", ORDER.desc).setRows(1); .addSort("item.lastmodified", ORDER.desc).setRows(1);
SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams); SolrDocumentList results = DSpaceSolrSearch.query(solrServerResolver.getServer(), solrParams);
if (results.getNumFound() == 0) { if (results.getNumFound() == 0) {
@@ -176,7 +174,7 @@ public class XOAI {
} }
} }
private int index(Date last) throws DSpaceSolrIndexerException { private int index(Date last) throws DSpaceSolrIndexerException, IOException {
System.out System.out
.println("Incremental import. Searching for documents modified after: " .println("Incremental import. Searching for documents modified after: "
+ last.toString()); + last.toString());
@@ -209,7 +207,7 @@ public class XOAI {
* visibility since the last update. * visibility since the last update.
* @throws DSpaceSolrIndexerException * @throws DSpaceSolrIndexerException
*/ */
private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException { private Iterator<Item> getItemsWithPossibleChangesBefore(Date last) throws DSpaceSolrIndexerException, IOException {
try { try {
SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id"); SolrQuery params = new SolrQuery("item.willChangeStatus:true").addField("item.id");
SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params);
@@ -251,7 +249,7 @@ public class XOAI {
* Item that should be checked for its presence in the index. * Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfIndexed(Item item) { private boolean checkIfIndexed(Item item) throws IOException {
SolrQuery params = new SolrQuery("item.id:" + item.getID().toString()).addField("item.id"); SolrQuery params = new SolrQuery("item.id:" + item.getID().toString()).addField("item.id");
try { try {
SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params);
@@ -267,7 +265,7 @@ public class XOAI {
* Item that should be checked for its presence in the index. * Item that should be checked for its presence in the index.
* @return has it been indexed? * @return has it been indexed?
*/ */
private boolean checkIfVisibleInOAI(Item item) { private boolean checkIfVisibleInOAI(Item item) throws IOException {
SolrQuery params = new SolrQuery("item.id:" + item.getID().toString()).addField("item.public"); SolrQuery params = new SolrQuery("item.id:" + item.getID().toString()).addField("item.public");
try { try {
SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params); SolrDocumentList documents = DSpaceSolrSearch.query(solrServerResolver.getServer(), params);
@@ -286,7 +284,7 @@ public class XOAI {
try { try {
int i = 0; int i = 0;
int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000); int batchSize = configurationService.getIntProperty("oai.import.batch.size", 1000);
SolrServer server = solrServerResolver.getServer(); SolrClient server = solrServerResolver.getServer();
ArrayList<SolrInputDocument> list = new ArrayList<>(); ArrayList<SolrInputDocument> list = new ArrayList<>();
while (iterator.hasNext()) { while (iterator.hasNext()) {
try { try {
@@ -299,8 +297,7 @@ public class XOAI {
//Uncache the item to keep memory consumption low //Uncache the item to keep memory consumption low
context.uncacheEntity(item); context.uncacheEntity(item);
} catch (SQLException | MetadataBindException | ParseException } catch (SQLException | IOException | XMLStreamException | WritingXmlException ex) {
| XMLStreamException | WritingXmlException ex) {
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
} }
i++; i++;
@@ -363,9 +360,9 @@ public class XOAI {
} }
private SolrInputDocument index(Item item) private SolrInputDocument index(Item item)
throws SQLException, MetadataBindException, ParseException, XMLStreamException, WritingXmlException { throws SQLException, IOException, XMLStreamException, WritingXmlException {
SolrInputDocument doc = new SolrInputDocument(); SolrInputDocument doc = new SolrInputDocument();
doc.addField("item.id", item.getID()); doc.addField("item.id", item.getID().toString());
String handle = item.getHandle(); String handle = item.getHandle();
doc.addField("item.handle", handle); doc.addField("item.handle", handle);

View File

@@ -7,9 +7,9 @@
*/ */
package org.dspace.xoai.services.api.solr; package org.dspace.xoai.services.api.solr;
import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
public interface SolrServerResolver { public interface SolrServerResolver {
SolrServer getServer() throws SolrServerException; SolrClient getServer() throws SolrServerException;
} }

View File

@@ -9,25 +9,25 @@ package org.dspace.xoai.services.impl.solr;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.xoai.services.api.config.ConfigurationService; import org.dspace.xoai.services.api.config.ConfigurationService;
import org.dspace.xoai.services.api.solr.SolrServerResolver; import org.dspace.xoai.services.api.solr.SolrServerResolver;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
public class DSpaceSolrServerResolver implements SolrServerResolver { public class DSpaceSolrServerResolver implements SolrServerResolver {
private static Logger log = LogManager.getLogger(DSpaceSolrServerResolver.class); private static final Logger log = LogManager.getLogger(DSpaceSolrServerResolver.class);
private static SolrServer server = null; private static SolrClient server = null;
@Autowired @Autowired
private ConfigurationService configurationService; private ConfigurationService configurationService;
@Override @Override
public SolrServer getServer() throws SolrServerException { public SolrClient getServer() throws SolrServerException {
if (server == null) { if (server == null) {
try { try {
server = new HttpSolrServer(configurationService.getProperty("oai", "solr.url")); server = new HttpSolrClient.Builder(configurationService.getProperty("oai", "solr.url")).build();
log.debug("Solr Server Initialized"); log.debug("Solr Server Initialized");
} catch (Exception e) { } catch (Exception e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);

View File

@@ -7,6 +7,7 @@
*/ */
package org.dspace.xoai.services.impl.xoai; package org.dspace.xoai.services.impl.xoai;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -21,8 +22,8 @@ import com.lyncode.xoai.dataprovider.exceptions.IdDoesNotExistException;
import com.lyncode.xoai.dataprovider.filter.ScopedFilter; import com.lyncode.xoai.dataprovider.filter.ScopedFilter;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList; import org.apache.solr.common.SolrDocumentList;
import org.dspace.xoai.data.DSpaceSolrItem; import org.dspace.xoai.data.DSpaceSolrItem;
@@ -37,11 +38,11 @@ import org.dspace.xoai.solr.exceptions.SolrSearchEmptyException;
* @author Lyncode Development Team (dspace at lyncode dot com) * @author Lyncode Development Team (dspace at lyncode dot com)
*/ */
public class DSpaceItemSolrRepository extends DSpaceItemRepository { public class DSpaceItemSolrRepository extends DSpaceItemRepository {
private static Logger log = LogManager.getLogger(DSpaceItemSolrRepository.class); private static final Logger log = LogManager.getLogger(DSpaceItemSolrRepository.class);
private SolrServer server; private final SolrClient server;
private SolrQueryResolver solrQueryResolver; private final SolrQueryResolver solrQueryResolver;
public DSpaceItemSolrRepository(SolrServer server, CollectionsService collectionsService, public DSpaceItemSolrRepository(SolrClient server, CollectionsService collectionsService,
HandleResolver handleResolver, SolrQueryResolver solrQueryResolver) { HandleResolver handleResolver, SolrQueryResolver solrQueryResolver) {
super(collectionsService, handleResolver); super(collectionsService, handleResolver);
this.server = server; this.server = server;
@@ -58,7 +59,7 @@ public class DSpaceItemSolrRepository extends DSpaceItemRepository {
try { try {
SolrQuery params = new SolrQuery("item.handle:" + parts[2]); SolrQuery params = new SolrQuery("item.handle:" + parts[2]);
return new DSpaceSolrItem(DSpaceSolrSearch.querySingle(server, params)); return new DSpaceSolrItem(DSpaceSolrSearch.querySingle(server, params));
} catch (SolrSearchEmptyException ex) { } catch (SolrSearchEmptyException | IOException ex) {
throw new IdDoesNotExistException(ex); throw new IdDoesNotExistException(ex);
} }
} }
@@ -79,9 +80,9 @@ public class DSpaceItemSolrRepository extends DSpaceItemRepository {
} }
}); });
return new ListItemIdentifiersResult(queryResult.hasMore(), identifierList, queryResult.getTotal()); return new ListItemIdentifiersResult(queryResult.hasMore(), identifierList, queryResult.getTotal());
} catch (DSpaceSolrException ex) { } catch (DSpaceSolrException | IOException ex) {
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
return new ListItemIdentifiersResult(false, new ArrayList<ItemIdentifier>()); return new ListItemIdentifiersResult(false, new ArrayList<>());
} }
} }
@@ -91,14 +92,15 @@ public class DSpaceItemSolrRepository extends DSpaceItemRepository {
try { try {
QueryResult queryResult = retrieveItems(filters, offset, length); QueryResult queryResult = retrieveItems(filters, offset, length);
return new ListItemsResults(queryResult.hasMore(), queryResult.getResults(), queryResult.getTotal()); return new ListItemsResults(queryResult.hasMore(), queryResult.getResults(), queryResult.getTotal());
} catch (DSpaceSolrException ex) { } catch (DSpaceSolrException | IOException ex) {
log.error(ex.getMessage(), ex); log.error(ex.getMessage(), ex);
return new ListItemsResults(false, new ArrayList<Item>()); return new ListItemsResults(false, new ArrayList<>());
} }
} }
private QueryResult retrieveItems(List<ScopedFilter> filters, int offset, int length) throws DSpaceSolrException { private QueryResult retrieveItems(List<ScopedFilter> filters, int offset, int length)
List<Item> list = new ArrayList<Item>(); throws DSpaceSolrException, IOException {
List<Item> list = new ArrayList<>();
SolrQuery params = new SolrQuery(solrQueryResolver.buildQuery(filters)) SolrQuery params = new SolrQuery(solrQueryResolver.buildQuery(filters))
.setRows(length) .setRows(length)
.setStart(offset); .setStart(offset);

View File

@@ -8,9 +8,11 @@
package org.dspace.xoai.solr; package org.dspace.xoai.solr;
import java.io.IOException;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER; import org.apache.solr.client.solrj.SolrQuery.ORDER;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument; import org.apache.solr.common.SolrDocument;
@@ -28,10 +30,10 @@ public class DSpaceSolrSearch {
*/ */
private DSpaceSolrSearch() { } private DSpaceSolrSearch() { }
public static SolrDocumentList query(SolrServer server, SolrQuery solrParams) public static SolrDocumentList query(SolrClient server, SolrQuery solrParams)
throws DSpaceSolrException { throws DSpaceSolrException, IOException {
try { try {
solrParams.addSortField("item.id", ORDER.asc); solrParams.addSort("item.id", ORDER.asc);
QueryResponse response = server.query(solrParams); QueryResponse response = server.query(solrParams);
return response.getResults(); return response.getResults();
} catch (SolrServerException ex) { } catch (SolrServerException ex) {
@@ -39,10 +41,10 @@ public class DSpaceSolrSearch {
} }
} }
public static SolrDocument querySingle(SolrServer server, SolrQuery solrParams) public static SolrDocument querySingle(SolrClient server, SolrQuery solrParams)
throws SolrSearchEmptyException { throws SolrSearchEmptyException, IOException {
try { try {
solrParams.addSortField("item.id", ORDER.asc); solrParams.addSort("item.id", ORDER.asc);
QueryResponse response = server.query(solrParams); QueryResponse response = server.query(solrParams);
if (response.getResults().getNumFound() > 0) { if (response.getResults().getNumFound() > 0) {
return response.getResults().get(0); return response.getResults().get(0);

View File

@@ -10,29 +10,29 @@ package org.dspace.xoai.solr;
import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer; import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.dspace.core.ConfigurationManager; import org.dspace.core.ConfigurationManager;
/** /**
* @author Lyncode Development Team (dspace at lyncode dot com) * @author Lyncode Development Team (dspace at lyncode dot com)
*/ */
public class DSpaceSolrServer { public class DSpaceSolrServer {
private static Logger log = LogManager.getLogger(DSpaceSolrServer.class); private static final Logger log = LogManager.getLogger(DSpaceSolrServer.class);
private static SolrServer _server = null; private static SolrClient _server = null;
/** /**
* Default constructor * Default constructor
*/ */
private DSpaceSolrServer() { } private DSpaceSolrServer() { }
public static SolrServer getServer() throws SolrServerException { public static SolrClient getServer() throws SolrServerException {
if (_server == null) { if (_server == null) {
try { try {
_server = new HttpSolrServer( _server = new HttpSolrClient.Builder(
ConfigurationManager.getProperty("oai", "solr.url")); ConfigurationManager.getProperty("oai", "solr.url")).build();
log.debug("Solr Server Initialized"); log.debug("Solr Server Initialized");
} catch (Exception e) { } catch (Exception e) {
log.error(e.getMessage(), e); log.error(e.getMessage(), e);

View File

@@ -10,6 +10,7 @@ package org.dspace.xoai.tests.helpers.stubs;
import static com.lyncode.xoai.dataprovider.core.Granularity.Second; import static com.lyncode.xoai.dataprovider.core.Granularity.Second;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.ParseException; import java.text.ParseException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -23,13 +24,14 @@ import com.lyncode.xoai.dataprovider.exceptions.MetadataBindException;
import com.lyncode.xoai.dataprovider.exceptions.WritingXmlException; import com.lyncode.xoai.dataprovider.exceptions.WritingXmlException;
import com.lyncode.xoai.dataprovider.xml.XmlOutputContext; import com.lyncode.xoai.dataprovider.xml.XmlOutputContext;
import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
public class ItemRepositoryBuilder { public class ItemRepositoryBuilder {
private SolrServer solrServer; private final SolrClient solrServer;
public ItemRepositoryBuilder(SolrServer solrServer) { public ItemRepositoryBuilder(SolrClient solrServer) {
this.solrServer = solrServer; this.solrServer = solrServer;
} }
@@ -37,7 +39,9 @@ public class ItemRepositoryBuilder {
try { try {
solrServer.add(index(builder)); solrServer.add(index(builder));
solrServer.commit(); solrServer.commit();
} catch (Exception e) { } catch (MetadataBindException | WritingXmlException | IOException
| SQLException | ParseException | XMLStreamException
| SolrServerException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
return this; return this;
@@ -74,9 +78,9 @@ public class ItemRepositoryBuilder {
} }
public static class DSpaceItemBuilder { public static class DSpaceItemBuilder {
private List<String> collections = new ArrayList<String>(); private final List<String> collections = new ArrayList<>();
private List<String> communities = new ArrayList<String>(); private final List<String> communities = new ArrayList<>();
private MetadataBuilder metadataBuilder = new MetadataBuilder(); private final MetadataBuilder metadataBuilder = new MetadataBuilder();
private String handle; private String handle;
private int id; private int id;
private String submitter; private String submitter;

View File

@@ -19,7 +19,7 @@ import com.lyncode.xoai.dataprovider.services.api.ResourceResolver;
import com.lyncode.xoai.dataprovider.services.impl.BaseDateProvider; import com.lyncode.xoai.dataprovider.services.impl.BaseDateProvider;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.Configuration;
import com.lyncode.xoai.dataprovider.xml.xoaiconfig.FormatConfiguration; import com.lyncode.xoai.dataprovider.xml.xoaiconfig.FormatConfiguration;
import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrClient;
import org.dspace.xoai.controller.DSpaceOAIDataProvider; import org.dspace.xoai.controller.DSpaceOAIDataProvider;
import org.dspace.xoai.services.api.EarliestDateResolver; import org.dspace.xoai.services.api.EarliestDateResolver;
import org.dspace.xoai.services.api.FieldResolver; import org.dspace.xoai.services.api.FieldResolver;
@@ -51,7 +51,7 @@ import org.springframework.web.context.WebApplicationContext;
@WebAppConfiguration @WebAppConfiguration
@ContextConfiguration(classes = {DSpaceTestConfiguration.class, DSpaceOAIDataProvider.class}) @ContextConfiguration(classes = {DSpaceTestConfiguration.class, DSpaceOAIDataProvider.class})
public abstract class AbstractDSpaceTest { public abstract class AbstractDSpaceTest {
private static BaseDateProvider baseDateProvider = new BaseDateProvider(); private static final BaseDateProvider baseDateProvider = new BaseDateProvider();
@Autowired @Autowired
WebApplicationContext wac; WebApplicationContext wac;
private MockMvc mockMvc; private MockMvc mockMvc;
@@ -62,7 +62,7 @@ public abstract class AbstractDSpaceTest {
private StubbedEarliestDateResolver earliestDateResolver; private StubbedEarliestDateResolver earliestDateResolver;
private StubbedSetRepository setRepository; private StubbedSetRepository setRepository;
private StubbedResourceResolver resourceResolver; private StubbedResourceResolver resourceResolver;
private SolrServer solrServer; private SolrClient solrServer;
@Before @Before
public void setup() { public void setup() {

View File

@@ -346,6 +346,26 @@ public final class DSpaceConfigurationService implements ConfigurationService {
} }
} }
/**
 * Add a value to a configuration property, creating the property if it does
 * not exist yet. String values are trimmed before being stored.
 *
 * @param name  the property name. May not be null.
 * @param value the value to append to the property. May not be null.
 * @return true if a new property was created, false if the value was added
 *         to an already-existing property.
 * @throws IllegalArgumentException if name or value is null.
 */
@Override
public synchronized boolean addPropertyValue(String name, Object value) {
    if (name == null) {
        throw new IllegalArgumentException("name cannot be null for setting configuration");
    }
    if (value == null) {
        throw new IllegalArgumentException("configuration value may not be null");
    }
    // If the value is a type of String, trim any leading/trailing spaces before saving it.
    if (value instanceof String) {
        value = ((String) value).trim();
    }
    Configuration configuration = getConfiguration();
    // A brand-new key means we created the property rather than appending to it.
    boolean isNew = !configuration.containsKey(name);
    configuration.addProperty(name, value);
    return isNew;
}
/* (non-Javadoc) /* (non-Javadoc)
* @see org.dspace.services.ConfigurationService#setProperty(java.lang.String, java.lang.Object) * @see org.dspace.services.ConfigurationService#setProperty(java.lang.String, java.lang.Object)
*/ */

View File

@@ -0,0 +1,98 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.servicemanager.config;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.configuration2.MapConfiguration;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Bash does not allow environment variables that contain dots in their name.
* This Configuration loads environment variables that contains two underlines
* and replaces "__P__" -> "." and "__D__" -> "-"
* E.g.: dspace__P__baseUrl will be read as dspace.baseUrl.
* E.g.: my__D__dspace__P__prop will be read as my-dspace.prop.
*
* Most of this file was copied from org.apache.commons.configuration2.EnvironmentConfiguration.
*
* @author Pascal-Nicolas Becker -- dspace at pascal dash becker dot de
*/
public class DSpaceEnvironmentConfiguration extends MapConfiguration {

    private static final Logger log = LoggerFactory.getLogger(DSpaceEnvironmentConfiguration.class);

    /**
     * Create a Configuration based on the environment variables.
     *
     * @see System#getenv()
     */
    public DSpaceEnvironmentConfiguration() {
        super(getModifiedEnvMap());
    }

    /**
     * Read the process environment and translate variable names that contain
     * the escape sequences "__P__" (dot) and "__D__" (dash) back into DSpace
     * property names. E.g. dspace__P__baseUrl becomes dspace.baseUrl.
     *
     * @return a map of translated property names to their environment values.
     */
    public static Map<String, Object> getModifiedEnvMap() {
        Map<String, String> sysEnv = System.getenv();
        HashMap<String, Object> env = new HashMap<>(sysEnv.size());

        for (Map.Entry<String, String> entry : sysEnv.entrySet()) {
            String key = entry.getKey();
            // Ignore all variables that do not contain "__"; those are loaded
            // by the standard apache commons config environment lookup instead.
            if (!key.contains("__")) {
                continue;
            }

            // replace "__P__" with a single dot.
            // replace "__D__" with a single dash.
            String lookup = key.replace("__P__", ".").replace("__D__", "-");
            String value = entry.getValue();
            if (value != null) {
                // store the new key with the old value in our new properties map.
                env.put(lookup, value);
                log.debug("Found env {} = {}.", lookup, value);
            } else {
                log.debug("Didn't find env {}.", lookup);
            }
        }
        return env;
    }

    /**
     * Adds a property to this configuration. Because this configuration is
     * read-only, this operation is not allowed and will cause an exception.
     *
     * @param key   the key of the property to be added
     * @param value the property value
     */
    @Override
    protected void addPropertyDirect(String key, Object value) {
        throw new UnsupportedOperationException("EnvironmentConfiguration is read-only!");
    }

    /**
     * Removes a property from this configuration. Because this configuration is
     * read-only, this operation is not allowed and will cause an exception.
     *
     * @param key the key of the property to be removed
     */
    @Override
    protected void clearPropertyDirect(String key) {
        throw new UnsupportedOperationException("EnvironmentConfiguration is read-only!");
    }

    /**
     * Removes all properties from this configuration. Because this
     * configuration is read-only, this operation is not allowed and will cause
     * an exception.
     */
    @Override
    protected void clearInternal() {
        throw new UnsupportedOperationException("EnvironmentConfiguration is read-only!");
    }
}

View File

@@ -0,0 +1,33 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.servicemanager.config;
import java.util.Collections;
import org.apache.commons.configuration2.builder.BasicBuilderParameters;
import org.apache.commons.configuration2.builder.combined.BaseConfigurationBuilderProvider;
/**
* Configures DSpaceEnvironmentConfiguration. Reuses BasicConfigurationBuilder and its parameters.
*
* @author Pascal-Nicolas Becker -- dspace at pascal dash becker dot de
*/
public class DSpaceEnvironmentConfigurationBuilderProvider extends BaseConfigurationBuilderProvider {

    /** Fully qualified name of the builder class this provider reuses. */
    private static final String BUILDER_CLASS =
            "org.apache.commons.configuration2.builder.BasicConfigurationBuilder";

    /** Fully qualified name of the configuration class the builder produces. */
    private static final String CONFIGURATION_CLASS =
            "org.dspace.servicemanager.config.DSpaceEnvironmentConfiguration";

    /**
     * Creates a new instance of {@code BaseConfigurationBuilderProvider} and
     * initializes all its properties.
     */
    public DSpaceEnvironmentConfigurationBuilderProvider() {
        // The basic builder parameters probably contain much more than we need,
        // nevertheless reusing them is easier than rewriting.
        super(BUILDER_CLASS, null, CONFIGURATION_CLASS,
                Collections.singleton(BasicBuilderParameters.class.getName()));
    }
}

View File

@@ -237,6 +237,16 @@ public interface ConfigurationService {
*/ */
public boolean hasProperty(String name); public boolean hasProperty(String name);
/**
* Add a value to a configuration property.
*
* @param name the property name. May not be null.
* @param value the property value. May not be null.
* @return true if a new property was created.
* @throws IllegalArgumentException if the name or value is null.
*/
public boolean addPropertyValue(String name, Object value);
/** /**
* Set a configuration property (setting) in the system. * Set a configuration property (setting) in the system.
* Type is not important here since conversion happens automatically * Type is not important here since conversion happens automatically

View File

@@ -1,297 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed
with this work for additional information regarding copyright
ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of
the License at http://www.apache.org/licenses/LICENSE-2.0 Unless
required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
-->
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>7.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-solr</artifactId>
<name>Apache Solr Webapp</name>
<description>Apache Solr Server</description>
<packaging>war</packaging>
<properties>
<!-- WARNING: When updating this dependency be sure to check the "zookeeper" pinned version !-->
<solr.version>4.10.4</solr.version>
<!-- 'root.basedir' is the path to the root [dspace-src] dir. It must be redefined by each child POM,
as it is used to reference the LICENSE_HEADER and *.properties file(s) in that directory. -->
<root.basedir>${basedir}/..</root.basedir>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<!-- This 'dspace-solr' WAR overlays the Apache Solr Web Application
available in Maven Central -->
<overlays>
<overlay>
<groupId>org.apache.solr</groupId>
<artifactId>solr</artifactId>
<!--
Exclude the solr core named apache-solr-core, this is needed because the dspace-solr changes
need to take precedence over the solr-core, the solr-core will still be loaded in the solr-core.jar
-->
<excludes>
<exclude>WEB-INF/lib/apache-solr-core-${solr.version}.jar</exclude>
</excludes>
</overlay>
</overlays>
<!-- Filter the web.xml (needed for IDE compatibility/debugging) -->
<filteringDeploymentDescriptors>true</filteringDeploymentDescriptors>
</configuration>
<executions>
<execution>
<!-- This execution creates a normal WAR (with all JARs, etc)-->
<id>webapp</id>
<configuration>
<primaryArtifact>true</primaryArtifact>
<archiveClasses>true</archiveClasses>
<attachClasses>true</attachClasses>
<classesClassifier>classes</classesClassifier>
<warSourceExcludes>WEB-INF/classes/**</warSourceExcludes>
<packagingExcludes>
WEB-INF/classes/**,
WEB-INF/lib/slf4j-jdk14-*.jar,
WEB-INF/lib/log4j-over-slf4j-*.jar
</packagingExcludes>
</configuration>
<goals>
<goal>war</goal>
</goals>
<phase>package</phase>
</execution>
<execution>
<!-- This execution creates a "skinny" WAR (without any JARs included)-->
<id>skinny</id>
<configuration>
<primaryArtifact>false</primaryArtifact>
<classifier>skinny</classifier>
<warSourceExcludes>WEB-INF/lib/**,WEB-INF/classes/**</warSourceExcludes>
<packagingExcludes>WEB-INF/lib/**,WEB-INF/classes/**</packagingExcludes>
</configuration>
<goals>
<goal>war</goal>
</goals>
<phase>package</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<!-- Depends on the Apache 'solr' web application (see Overlay settings above)-->
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr</artifactId>
<version>${solr.version}</version>
<type>war</type>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-core</artifactId>
<version>${solr.version}</version>
<type>jar</type>
<exclusions>
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<artifactId>javax.servlet</artifactId>
<groupId>org.eclipse.jetty.orbit</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-continuation</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-deploy</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-http</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-io</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-jmx</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-security</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-server</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlet</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-xml</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<version>${solr.version}</version>
<exclusions>
<exclusion>
<artifactId>javax.servlet</artifactId>
<groupId>org.eclipse.jetty.orbit</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-continuation</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-deploy</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-http</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-io</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-jmx</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-security</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-server</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-servlet</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-util</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-webapp</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<artifactId>jetty-xml</artifactId>
<groupId>org.eclipse.jetty</groupId>
</exclusion>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--This version is pinned on this one, since solr introduces both 3.4.5 & 3.4.6-->
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<version>3.4.6</version>
<exclusions>
<exclusion>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Replace J.U.L. logging with log4j -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-jul</artifactId>
<version>${log4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-web</artifactId>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
</project>

View File

@@ -1,65 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.solr.filters;
import java.io.IOException;
import java.net.InetAddress;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
public class LocalHostRestrictionFilter implements Filter {

    // When true (the default) only requests originating from this host are
    // allowed; can be switched off via the servlet context init parameter
    // "LocalHostRestrictionFilter.localhost" (see init below).
    private boolean enabled = true;

    public LocalHostRestrictionFilter() {
        // No initialization needed; configuration happens in init(FilterConfig).
    }

    public void destroy() {
        // No resources to release.
    }

    /**
     * Reject any request that does not originate from the local machine with
     * an HTTP 403, unless the restriction has been disabled. Requests are
     * accepted when the remote address equals this host's address, the IPv4
     * loopback "127.0.0.1", or an IPv6 loopback prefix "0:0:0:0:0:0:0:1".
     */
    public void doFilter(ServletRequest request, ServletResponse response,
                         FilterChain chain)
        throws IOException, ServletException {
        if (enabled) {
            InetAddress ia = InetAddress.getLocalHost();
            String localAddr = ia.getHostAddress();
            String remoteAddr = request.getRemoteAddr();
            // NOTE(review): startsWith is used for IPv6 to tolerate a trailing
            // scope/zone suffix — confirm against expected container behavior.
            if (!(localAddr.equals(remoteAddr) ||
                  remoteAddr.equals("127.0.0.1") ||
                  remoteAddr.startsWith("0:0:0:0:0:0:0:1"))) {
                ((HttpServletResponse) response).sendError(403);
                return;
            }
        }
        chain.doFilter(request, response);
    }

    /**
     * Read the "LocalHostRestrictionFilter.localhost" servlet context init
     * parameter; a value of "false" (case-insensitive) disables the filter.
     */
    public void init(FilterConfig arg0)
        throws ServletException {
        String restrict = arg0.getServletContext().getInitParameter(
            "LocalHostRestrictionFilter.localhost");
        if ("false".equalsIgnoreCase(restrict)) {
            enabled = false;
        }
    }
}

View File

@@ -1,213 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
The contents of this file are subject to the license and copyright
detailed in the LICENSE and NOTICE files at the root of the source
tree and available online at
http://www.dspace.org/license/
-->
<web-app version='2.5'
xmlns='http://java.sun.com/xml/ns/javaee'
xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'
xsi:schemaLocation='http://java.sun.com/xml/ns/javaee
http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd'>
<!-- Uncomment if you are trying to use a Resin version before 3.0.19.
Their XML implementation isn't entirely compatible with Xerces.
Below are the implementations to use with Sun's JVM.
<system-property javax.xml.xpath.XPathFactory=
"com.sun.org.apache.xpath.internal.jaxp.XPathFactoryImpl"/>
<system-property javax.xml.parsers.DocumentBuilderFactory=
"com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl"/>
<system-property javax.xml.parsers.SAXParserFactory=
"com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl"/>
-->
<env-entry>
<description>Solr home: configuration, cores etc.</description>
<env-entry-name>solr/home</env-entry-name>
<env-entry-value>${dspace.dir}/solr</env-entry-value>
<env-entry-type>java.lang.String</env-entry-type>
</env-entry>
<!-- Tell Solr where its log4j configuration is located -->
<!-- NOTE: Solr cannot use the default DSpace log4j configuration as it
isn't initialized until the DSpace Kernel starts up, and we don't want
Solr to depend on the DSpace Kernel/API -->
<context-param>
<description>
URL locating a Log4J configuration file (properties or XML).
</description>
<param-name>log4jConfiguration</param-name>
<param-value>${dspace.dir}/config/log4j-solr.xml</param-value>
</context-param>
<listener>
<listener-class>org.apache.logging.log4j.web.Log4jServletContextListener</listener-class>
</listener>
<filter>
<description>Activate logging</description>
<filter-name>log4jServletFilter</filter-name>
<filter-class>org.apache.logging.log4j.web.Log4jServletFilter</filter-class>
</filter>
<!-- Any path (name) registered in solrconfig.xml will be sent to that filter -->
<filter>
<filter-name>LocalHostRestrictionFilter</filter-name>
<filter-class>org.dspace.solr.filters.LocalHostRestrictionFilter</filter-class>
</filter>
<!-- Any path (name) registered in solrconfig.xml will be sent to that filter -->
<filter>
<filter-name>SolrRequestFilter</filter-name>
<filter-class>org.apache.solr.servlet.SolrDispatchFilter</filter-class>
<!-- If you are wiring Solr into a larger web application which controls
the web context root, you will probably want to mount Solr under
a path prefix (app.war with /app/solr mounted into it, for example).
You will need to put this prefix in front of the SolrDispatchFilter
url-pattern mapping too (/solr/*), and also on any paths for
legacy Solr servlet mappings you may be using.
For the Admin UI to work properly in a path-prefixed configuration,
the admin folder containing the resources needs to be under the app context root
named to match the path-prefix. For example:
         app.war
            xxx
               js
                  main.js
-->
<!--
<init-param>
<param-name>path-prefix</param-name>
<param-value>/xxx</param-value>
</init-param>
-->
</filter>
<filter-mapping>
<filter-name>log4jServletFilter</filter-name>
<url-pattern>/*</url-pattern>
<dispatcher>REQUEST</dispatcher>
<dispatcher>FORWARD</dispatcher>
<dispatcher>INCLUDE</dispatcher>
<dispatcher>ERROR</dispatcher>
</filter-mapping>
<filter-mapping>
<filter-name>LocalHostRestrictionFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<filter-mapping>
<!--
NOTE: When using multicore, /admin JSP URLs with a core specified
such as /solr/coreName/admin/stats.jsp get forwarded by a
RequestDispatcher to /solr/admin/stats.jsp with the specified core
put into request scope keyed as "org.apache.solr.SolrCore".
It is unnecessary, and potentially problematic, to have the SolrDispatchFilter
configured to also filter on forwards. Do not configure
this dispatcher as <dispatcher>FORWARD</dispatcher>.
-->
<filter-name>SolrRequestFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
<!-- Otherwise it will continue to the old servlets -->
<servlet>
<servlet-name>Zookeeper</servlet-name>
<servlet-class>org.apache.solr.servlet.ZookeeperInfoServlet</servlet-class>
</servlet>
<servlet>
<servlet-name>LoadAdminUI</servlet-name>
<servlet-class>org.apache.solr.servlet.LoadAdminUiServlet</servlet-class>
</servlet>
<!-- Remove in Solr 5.0 -->
<!-- This sends SC_MOVED_PERMANENTLY (301) for resources that changed in 4.0 -->
<servlet>
<servlet-name>RedirectOldAdminUI</servlet-name>
<servlet-class>org.apache.solr.servlet.RedirectServlet</servlet-class>
<init-param>
<param-name>destination</param-name>
<param-value>${context}/#/</param-value>
</init-param>
</servlet>
<servlet>
<servlet-name>RedirectOldZookeeper</servlet-name>
<servlet-class>org.apache.solr.servlet.RedirectServlet</servlet-class>
<init-param>
<param-name>destination</param-name>
<param-value>${context}/zookeeper</param-value>
</init-param>
</servlet>
<servlet>
<servlet-name>RedirectLogging</servlet-name>
<servlet-class>org.apache.solr.servlet.RedirectServlet</servlet-class>
<init-param>
<param-name>destination</param-name>
<param-value>${context}/#/~logging</param-value>
</init-param>
</servlet>
<servlet>
<servlet-name>SolrRestApi</servlet-name>
<servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>
<init-param>
<param-name>org.restlet.application</param-name>
<param-value>org.apache.solr.rest.SolrRestApi</param-value>
</init-param>
</servlet>
<servlet-mapping>
<servlet-name>RedirectOldAdminUI</servlet-name>
<url-pattern>/admin/</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>RedirectOldAdminUI</servlet-name>
<url-pattern>/admin</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>RedirectOldZookeeper</servlet-name>
<url-pattern>/zookeeper.jsp</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>RedirectLogging</servlet-name>
<url-pattern>/logging</url-pattern>
</servlet-mapping>
<!-- Servlet Mapping -->
<servlet-mapping>
<servlet-name>Zookeeper</servlet-name>
<url-pattern>/zookeeper</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>LoadAdminUI</servlet-name>
<url-pattern>/admin.html</url-pattern>
</servlet-mapping>
<servlet-mapping>
<servlet-name>SolrRestApi</servlet-name>
<url-pattern>/schema/*</url-pattern>
</servlet-mapping>
<mime-mapping>
<extension>.xsl</extension>
<!-- per http://www.w3.org/TR/2006/PR-xslt20-20061121/ -->
<mime-type>application/xslt+xml</mime-type>
</mime-mapping>
<welcome-file-list>
<welcome-file>admin.html</welcome-file>
</welcome-file-list>
</web-app>

View File

@@ -126,6 +126,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir> <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging --> <!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable> <dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables> </systemPropertyVariables>
</configuration> </configuration>
</plugin> </plugin>
@@ -139,6 +140,7 @@
<dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir> <dspace.dir>${agnostic.build.dir}/testing/dspace/</dspace.dir>
<!-- Turn off any DSpace logging --> <!-- Turn off any DSpace logging -->
<dspace.log.init.disable>true</dspace.log.init.disable> <dspace.log.init.disable>true</dspace.log.init.disable>
<solr.install.dir>${agnostic.build.dir}/testing/dspace/solr/</solr.install.dir>
</systemPropertyVariables> </systemPropertyVariables>
</configuration> </configuration>
</plugin> </plugin>
@@ -198,32 +200,11 @@
<artifactId>spring-boot-starter-aop</artifactId> <artifactId>spring-boot-starter-aop</artifactId>
<version>${spring-boot.version}</version> <version>${spring-boot.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.springframework.boot</groupId> <groupId>com.flipkart.zjsonpatch</groupId>
<artifactId>spring-boot-starter-test</artifactId> <artifactId>zjsonpatch</artifactId>
<version>${spring-boot.version}</version> <version>0.4.6</version>
<scope>test</scope>
<exclusions>
<!-- Temporary exclusion to avoid dependency conflict with version of org.json:json used by dspace-api.
NOTE: THIS CAN BE REMOVED ONCE WE UPGRADE TO SPRING-BOOT v1.5 (or above), see DS-3802
As of Spring-Boot 1.5, org.json:json is no longer used by spring-boot-starter-test -->
<exclusion>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<scope>test</scope>
<version>${json-path.version}</version>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path-assert</artifactId>
<version>${json-path.version}</version>
<scope>test</scope>
</dependency> </dependency>
<!-- The HAL Browser --> <!-- The HAL Browser -->
@@ -235,7 +216,6 @@
header (bad signature) during the tomcat startup header (bad signature) during the tomcat startup
force the use of the previous version as the jar file force the use of the previous version as the jar file
looks corrupted in the maven repository --> looks corrupted in the maven repository -->
<!-- <version>2.5.5.RELEASE</version> -->
</dependency> </dependency>
<!-- Add in Spring Security for AuthN and AuthZ --> <!-- Add in Spring Security for AuthN and AuthZ -->
@@ -276,13 +256,9 @@
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>dspace-services</artifactId> <artifactId>dspace-services</artifactId>
</dependency> </dependency>
<!-- Apache Commons Collections 4.1 (used for MultiValuedMap to store metadata values for views) -->
<!-- NOTE: Currently DSpace API / Services still used 3.2 and may need upgrading at some point -->
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId> <artifactId>commons-collections4</artifactId>
<version>4.1</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>joda-time</groupId> <groupId>joda-time</groupId>
@@ -291,10 +267,42 @@
<dependency> <dependency>
<groupId>com.nimbusds</groupId> <groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId> <artifactId>nimbus-jose-jwt</artifactId>
<version>4.23</version> <version>6.2</version>
</dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>${solr.client.version}</version>
</dependency> </dependency>
<!-- TEST DEPENDENCIES --> <!-- TEST DEPENDENCIES -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${spring-boot.version}</version>
<scope>test</scope>
<exclusions>
<!-- Temporary exclusion to avoid dependency conflict with version of org.json:json used by dspace-api.
NOTE: THIS CAN BE REMOVED ONCE WE UPGRADE TO SPRING-BOOT v1.5 (or above), see DS-3802
As of Spring-Boot 1.5, org.json:json is no longer used by spring-boot-starter-test -->
<exclusion>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
<scope>test</scope>
<version>${json-path.version}</version>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path-assert</artifactId>
<version>${json-path.version}</version>
<scope>test</scope>
</dependency>
<dependency> <!-- Keep jmockit before junit --> <dependency> <!-- Keep jmockit before junit -->
<groupId>org.jmockit</groupId> <groupId>org.jmockit</groupId>
<artifactId>jmockit</artifactId> <artifactId>jmockit</artifactId>
@@ -320,28 +328,146 @@
<artifactId>mockito-core</artifactId> <artifactId>mockito-core</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Solr Core is needed for Integration Tests (to run a MockSolrServer) -->
<!-- The following Solr / Lucene dependencies also support integration tests -->
<dependency> <dependency>
<groupId>org.dspace</groupId> <groupId>org.apache.solr</groupId>
<artifactId>dspace-solr</artifactId> <artifactId>solr-core</artifactId>
<classifier>classes</classifier> <version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jmx</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-rewrite</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-cell</artifactId>
<version>${solr.client.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-continuation</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-deploy</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jmx</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-rewrite</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlet</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-servlets</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-webapp</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-xml</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Reminder: Keep icu4j (in Parent POM) synced with version used by lucene-analyzers-icu below,
otherwise ICUFoldingFilterFactory may throw errors in tests. -->
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-icu</artifactId> <artifactId>lucene-analyzers-icu</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-smartcn</artifactId> <artifactId>lucene-analyzers-smartcn</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.apache.lucene</groupId> <groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-stempel</artifactId> <artifactId>lucene-analyzers-stempel</artifactId>
<version>${solr.version}</version> <version>${solr.client.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>

View File

@@ -0,0 +1,153 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.converter;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.AInprogressSubmissionRest;
import org.dspace.app.rest.model.ErrorRest;
import org.dspace.app.rest.model.SubmissionDefinitionRest;
import org.dspace.app.rest.model.SubmissionSectionRest;
import org.dspace.app.rest.submit.AbstractRestProcessingStep;
import org.dspace.app.rest.submit.SubmissionService;
import org.dspace.app.util.SubmissionConfigReader;
import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.app.util.SubmissionStepConfig;
import org.dspace.content.Collection;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Abstract implementation providing the common functionalities for all the inprogressSubmission Converter
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
* @param <T>
* the DSpace API inprogressSubmission object
* @param <R>
* the DSpace REST inprogressSubmission representation
* @param <ID>
* the Serializable class used as primary key
*/
public abstract class AInprogressItemConverter<T extends InProgressSubmission<ID>,
    R extends AInprogressSubmissionRest<ID>, ID extends Serializable>
    implements IndexableDSpaceObjectConverter<T, R> {

    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(AInprogressItemConverter.class);

    @Autowired
    private EPersonConverter epersonConverter;
    @Autowired
    private ItemConverter itemConverter;
    @Autowired
    private CollectionConverter collectionConverter;

    // Reader for the submission configuration; initialized once in the constructor
    // and shared with subclasses.
    protected SubmissionConfigReader submissionConfigReader;

    @Autowired
    private SubmissionDefinitionConverter submissionDefinitionConverter;
    @Autowired
    private SubmissionSectionConverter submissionSectionConverter;
    @Autowired
    SubmissionService submissionService;

    public AInprogressItemConverter() throws SubmissionConfigReaderException {
        submissionConfigReader = new SubmissionConfigReader();
    }

    /**
     * Populate the common fields of an in-progress-submission REST
     * representation (id, collection, item, submitter) from the DSpace API
     * object, then resolve the collection's submission definition and run
     * each configured section's processing step to attach per-section data
     * and validation errors.
     *
     * @param obj   the DSpace API in-progress submission to read from
     * @param witem the REST representation to fill
     * @throws RuntimeException wrapping any SQLException raised while
     *                          retrieving the submitter
     */
    protected void fillFromModel(T obj, R witem) {
        Collection collection = obj.getCollection();
        Item item = obj.getItem();
        EPerson submitter = null;
        try {
            submitter = obj.getSubmitter();
        } catch (SQLException e) {
            throw new RuntimeException(e.getMessage(), e);
        }

        witem.setId(obj.getID());
        witem.setCollection(collection != null ? collectionConverter.convert(collection) : null);
        witem.setItem(itemConverter.convert(item));
        witem.setSubmitter(epersonConverter.convert(submitter));

        // 1. retrieve the submission definition
        // 2. iterate over the submission sections to allow each step to plug in
        //    additional info
        if (collection != null) {
            SubmissionDefinitionRest def = submissionDefinitionConverter
                .convert(submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle()));
            witem.setSubmissionDefinition(def);
            for (SubmissionSectionRest sections : def.getPanels()) {
                SubmissionStepConfig stepConfig = submissionSectionConverter.toModel(sections);

                /*
                 * Load the step processing class (using the current class
                 * loader) and instantiate it exactly once; the instance used
                 * for the type check below is the same one that processes the
                 * section.
                 */
                ClassLoader loader = this.getClass().getClassLoader();
                try {
                    Class<?> stepClass = loader.loadClass(stepConfig.getProcessingClassName());
                    Object stepInstance = stepClass.getDeclaredConstructor().newInstance();

                    if (stepInstance instanceof AbstractRestProcessingStep) {
                        // load the interface for this step
                        AbstractRestProcessingStep stepProcessing =
                            (AbstractRestProcessingStep) stepInstance;
                        for (ErrorRest error : stepProcessing.validate(submissionService, obj, stepConfig)) {
                            addError(witem.getErrors(), error);
                        }
                        witem.getSections()
                             .put(sections.getId(), stepProcessing.getData(submissionService, obj, stepConfig));
                    } else {
                        log.warn("The submission step class specified by '" + stepConfig.getProcessingClassName() +
                                 "' does not extend the class org.dspace.app.rest.submit.AbstractRestProcessingStep!" +
                                 " Therefore it cannot be used by the Configurable Submission as the " +
                                 "<processing-class>!");
                    }
                } catch (Exception e) {
                    log.error("An error occurred during the unmarshal of the data for the section " + sections.getId()
                              + " - reported error: " + e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Add an error to the list, merging its paths into an existing entry when
     * one with the same i18n message key is already present (so each distinct
     * message appears only once).
     *
     * @param errors the accumulated error list (mutated in place)
     * @param toAdd  the error to merge or append
     */
    private void addError(List<ErrorRest> errors, ErrorRest toAdd) {
        boolean found = false;
        String i18nKey = toAdd.getMessage();
        if (StringUtils.isNotBlank(i18nKey)) {
            for (ErrorRest error : errors) {
                if (i18nKey.equals(error.getMessage())) {
                    error.getPaths().addAll(toAdd.getPaths());
                    found = true;
                    break;
                }
            }
        }
        if (!found) {
            errors.add(toAdd);
        }
    }
}

View File

@@ -7,7 +7,6 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import org.apache.log4j.Logger;
import org.dspace.app.rest.model.ClaimedTaskRest; import org.dspace.app.rest.model.ClaimedTaskRest;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask; import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
@@ -25,11 +24,12 @@ import org.springframework.stereotype.Component;
public class ClaimedTaskConverter public class ClaimedTaskConverter
implements IndexableDSpaceObjectConverter<ClaimedTask, org.dspace.app.rest.model.ClaimedTaskRest> { implements IndexableDSpaceObjectConverter<ClaimedTask, org.dspace.app.rest.model.ClaimedTaskRest> {
private static final Logger log = Logger.getLogger(ClaimedTaskConverter.class);
@Autowired @Autowired
private WorkflowItemConverter workflowItemConverter; private WorkflowItemConverter workflowItemConverter;
@Autowired
private EPersonConverter epersonConverter;
@Override @Override
public ClaimedTaskRest fromModel(ClaimedTask obj) { public ClaimedTaskRest fromModel(ClaimedTask obj) {
ClaimedTaskRest taskRest = new ClaimedTaskRest(); ClaimedTaskRest taskRest = new ClaimedTaskRest();
@@ -39,6 +39,7 @@ public class ClaimedTaskConverter
taskRest.setWorkflowitem(workflowItemConverter.convert(witem)); taskRest.setWorkflowitem(workflowItemConverter.convert(witem));
taskRest.setAction(obj.getActionID()); taskRest.setAction(obj.getActionID());
taskRest.setStep(obj.getStepID()); taskRest.setStep(obj.getStepID());
taskRest.setOwner(epersonConverter.convert(obj.getOwner()));
return taskRest; return taskRest;
} }

View File

@@ -116,7 +116,8 @@ public class JsonPatchConverter implements PatchConverter<JsonNode> {
Object value = operation.getValue(); Object value = operation.getValue();
if (value != null) { if (value != null) {
opNode.set("value", mapper.valueToTree(value)); opNode.set("value", value instanceof JsonValueEvaluator ? ((JsonValueEvaluator) value).getValueNode()
: mapper.valueToTree(value));
} }
patchNode.add(opNode); patchNode.add(opNode);

View File

@@ -7,7 +7,6 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import org.apache.log4j.Logger;
import org.dspace.app.rest.model.PoolTaskRest; import org.dspace.app.rest.model.PoolTaskRest;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.xmlworkflow.storedcomponents.PoolTask; import org.dspace.xmlworkflow.storedcomponents.PoolTask;
@@ -25,8 +24,6 @@ import org.springframework.stereotype.Component;
public class PoolTaskConverter public class PoolTaskConverter
implements IndexableDSpaceObjectConverter<PoolTask, org.dspace.app.rest.model.PoolTaskRest> { implements IndexableDSpaceObjectConverter<PoolTask, org.dspace.app.rest.model.PoolTaskRest> {
private static final Logger log = Logger.getLogger(PoolTaskConverter.class);
@Autowired @Autowired
private WorkflowItemConverter workflowItemConverter; private WorkflowItemConverter workflowItemConverter;

View File

@@ -7,26 +7,10 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.rest.model.ErrorRest;
import org.dspace.app.rest.model.SubmissionDefinitionRest;
import org.dspace.app.rest.model.SubmissionSectionRest;
import org.dspace.app.rest.model.WorkflowItemRest; import org.dspace.app.rest.model.WorkflowItemRest;
import org.dspace.app.rest.submit.AbstractRestProcessingStep;
import org.dspace.app.rest.submit.SubmissionService;
import org.dspace.app.util.SubmissionConfigReader;
import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.app.util.SubmissionStepConfig;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.eperson.EPerson;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem; import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -38,95 +22,16 @@ import org.springframework.stereotype.Component;
*/ */
@Component @Component
public class WorkflowItemConverter public class WorkflowItemConverter
implements IndexableDSpaceObjectConverter<XmlWorkflowItem, org.dspace.app.rest.model.WorkflowItemRest> { extends AInprogressItemConverter<XmlWorkflowItem, org.dspace.app.rest.model.WorkflowItemRest, Integer> {
private static final Logger log = Logger.getLogger(WorkflowItemConverter.class);
@Autowired
private EPersonConverter epersonConverter;
@Autowired
private ItemConverter itemConverter;
@Autowired
private CollectionConverter collectionConverter;
private SubmissionConfigReader submissionConfigReader;
@Autowired
private SubmissionDefinitionConverter submissionDefinitionConverter;
@Autowired
private SubmissionSectionConverter submissionSectionConverter;
@Autowired
SubmissionService submissionService;
public WorkflowItemConverter() throws SubmissionConfigReaderException { public WorkflowItemConverter() throws SubmissionConfigReaderException {
submissionConfigReader = new SubmissionConfigReader(); super();
} }
@Override @Override
public WorkflowItemRest fromModel(XmlWorkflowItem obj) { public WorkflowItemRest fromModel(XmlWorkflowItem obj) {
WorkflowItemRest witem = new WorkflowItemRest(); WorkflowItemRest witem = new WorkflowItemRest();
fillFromModel(obj, witem);
Collection collection = obj.getCollection();
Item item = obj.getItem();
EPerson submitter = null;
try {
submitter = obj.getSubmitter();
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
witem.setId(obj.getID());
witem.setCollection(collection != null ? collectionConverter.convert(collection) : null);
witem.setItem(itemConverter.convert(item));
witem.setSubmitter(epersonConverter.convert(submitter));
// 1. retrieve the submission definition
// 2. iterate over the submission section to allow to plugin additional
// info
if (collection != null) {
SubmissionDefinitionRest def = submissionDefinitionConverter
.convert(submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle()));
witem.setSubmissionDefinition(def);
for (SubmissionSectionRest sections : def.getPanels()) {
SubmissionStepConfig stepConfig = submissionSectionConverter.toModel(sections);
/*
* First, load the step processing class (using the current
* class loader)
*/
ClassLoader loader = this.getClass().getClassLoader();
Class stepClass;
try {
stepClass = loader.loadClass(stepConfig.getProcessingClassName());
Object stepInstance = stepClass.newInstance();
if (stepInstance instanceof AbstractRestProcessingStep) {
// load the interface for this step
AbstractRestProcessingStep stepProcessing =
(AbstractRestProcessingStep) stepClass.newInstance();
for (ErrorRest error : stepProcessing.validate(submissionService, obj, stepConfig)) {
addError(witem.getErrors(), error);
}
witem.getSections()
.put(sections.getId(), stepProcessing.getData(submissionService, obj, stepConfig));
} else {
log.warn("The submission step class specified by '" + stepConfig.getProcessingClassName() +
"' does not extend the class org.dspace.app.rest.submit.AbstractRestProcessingStep!" +
" Therefore it cannot be used by the Configurable Submission as the " +
"<processing-class>!");
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
return witem; return witem;
} }
@@ -135,25 +40,6 @@ public class WorkflowItemConverter
return null; return null;
} }
private void addError(List<ErrorRest> errors, ErrorRest toAdd) {
boolean found = false;
String i18nKey = toAdd.getMessage();
if (StringUtils.isNotBlank(i18nKey)) {
for (ErrorRest error : errors) {
if (i18nKey.equals(error.getMessage())) {
error.getPaths().addAll(toAdd.getPaths());
found = true;
break;
}
}
}
if (!found) {
errors.add(toAdd);
}
}
@Override @Override
public boolean supportsModel(IndexableObject object) { public boolean supportsModel(IndexableObject object) {
return object instanceof XmlWorkflowItem; return object instanceof XmlWorkflowItem;

View File

@@ -7,26 +7,10 @@
*/ */
package org.dspace.app.rest.converter; package org.dspace.app.rest.converter;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.dspace.app.rest.model.ErrorRest;
import org.dspace.app.rest.model.SubmissionDefinitionRest;
import org.dspace.app.rest.model.SubmissionSectionRest;
import org.dspace.app.rest.model.WorkspaceItemRest; import org.dspace.app.rest.model.WorkspaceItemRest;
import org.dspace.app.rest.submit.AbstractRestProcessingStep;
import org.dspace.app.rest.submit.SubmissionService;
import org.dspace.app.util.SubmissionConfigReader;
import org.dspace.app.util.SubmissionConfigReaderException; import org.dspace.app.util.SubmissionConfigReaderException;
import org.dspace.app.util.SubmissionStepConfig;
import org.dspace.browse.IndexableObject; import org.dspace.browse.IndexableObject;
import org.dspace.content.Collection;
import org.dspace.content.Item;
import org.dspace.content.WorkspaceItem; import org.dspace.content.WorkspaceItem;
import org.dspace.eperson.EPerson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -37,96 +21,17 @@ import org.springframework.stereotype.Component;
*/ */
@Component @Component
public class WorkspaceItemConverter public class WorkspaceItemConverter
implements extends AInprogressItemConverter<WorkspaceItem, WorkspaceItemRest, Integer> {
IndexableDSpaceObjectConverter<org.dspace.content.WorkspaceItem, org.dspace.app.rest.model.WorkspaceItemRest> {
private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(WorkspaceItemConverter.class);
@Autowired
private EPersonConverter epersonConverter;
@Autowired
private ItemConverter itemConverter;
@Autowired
private CollectionConverter collectionConverter;
private SubmissionConfigReader submissionConfigReader;
@Autowired
private SubmissionDefinitionConverter submissionDefinitionConverter;
@Autowired
private SubmissionSectionConverter submissionSectionConverter;
@Autowired
SubmissionService submissionService;
public WorkspaceItemConverter() throws SubmissionConfigReaderException { public WorkspaceItemConverter() throws SubmissionConfigReaderException {
submissionConfigReader = new SubmissionConfigReader(); super();
} }
@Override @Override
public WorkspaceItemRest fromModel(org.dspace.content.WorkspaceItem obj) { public WorkspaceItemRest fromModel(org.dspace.content.WorkspaceItem obj) {
WorkspaceItemRest witem = new WorkspaceItemRest(); WorkspaceItemRest witem = new WorkspaceItemRest();
Collection collection = obj.getCollection(); fillFromModel(obj, witem);
Item item = obj.getItem();
EPerson submitter = null;
try {
submitter = obj.getSubmitter();
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
witem.setId(obj.getID());
witem.setCollection(collection != null ? collectionConverter.convert(collection) : null);
witem.setItem(itemConverter.convert(item));
witem.setSubmitter(epersonConverter.convert(submitter));
// 1. retrieve the submission definition
// 2. iterate over the submission section to allow to plugin additional
// info
if (collection != null) {
SubmissionDefinitionRest def = submissionDefinitionConverter
.convert(submissionConfigReader.getSubmissionConfigByCollection(collection.getHandle()));
witem.setSubmissionDefinition(def);
for (SubmissionSectionRest sections : def.getPanels()) {
SubmissionStepConfig stepConfig = submissionSectionConverter.toModel(sections);
/*
* First, load the step processing class (using the current
* class loader)
*/
ClassLoader loader = this.getClass().getClassLoader();
Class stepClass;
try {
stepClass = loader.loadClass(stepConfig.getProcessingClassName());
Object stepInstance = stepClass.newInstance();
if (stepInstance instanceof AbstractRestProcessingStep) {
// load the interface for this step
AbstractRestProcessingStep stepProcessing =
(AbstractRestProcessingStep) stepClass.newInstance();
for (ErrorRest error : stepProcessing.validate(submissionService, obj, stepConfig)) {
addError(witem.getErrors(), error);
}
witem.getSections()
.put(sections.getId(), stepProcessing.getData(submissionService, obj, stepConfig));
} else {
log.warn("The submission step class specified by '" + stepConfig.getProcessingClassName() +
"' does not extend the class org.dspace.app.rest.submit.AbstractRestProcessingStep!" +
" Therefore it cannot be used by the Configurable Submission as the " +
"<processing-class>!");
}
} catch (Exception e) {
log.error(e.getMessage(), e);
}
}
}
return witem; return witem;
} }
@@ -135,25 +40,6 @@ public class WorkspaceItemConverter
return null; return null;
} }
private void addError(List<ErrorRest> errors, ErrorRest toAdd) {
boolean found = false;
String i18nKey = toAdd.getMessage();
if (StringUtils.isNotBlank(i18nKey)) {
for (ErrorRest error : errors) {
if (i18nKey.equals(error.getMessage())) {
error.getPaths().addAll(toAdd.getPaths());
found = true;
break;
}
}
}
if (!found) {
errors.add(toAdd);
}
}
@Override @Override
public boolean supportsModel(IndexableObject object) { public boolean supportsModel(IndexableObject object) {
return object instanceof WorkspaceItem; return object instanceof WorkspaceItem;

View File

@@ -11,7 +11,6 @@ import static org.springframework.web.servlet.DispatcherServlet.EXCEPTION_ATTRIB
import java.io.IOException; import java.io.IOException;
import java.sql.SQLException; import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
@@ -19,13 +18,16 @@ import org.dspace.app.rest.security.RestAuthenticationService;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.springframework.beans.TypeMismatchException; import org.springframework.beans.TypeMismatchException;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.repository.support.QueryMethodParameterConversionException; import org.springframework.data.repository.support.QueryMethodParameterConversionException;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus; import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.web.bind.MissingServletRequestParameterException; import org.springframework.web.bind.MissingServletRequestParameterException;
import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.context.request.WebRequest; import org.springframework.web.context.request.WebRequest;
import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
@@ -42,7 +44,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
@Autowired @Autowired
private RestAuthenticationService restAuthenticationService; private RestAuthenticationService restAuthenticationService;
@ExceptionHandler({AuthorizeException.class, RESTAuthorizationException.class}) @ExceptionHandler({AuthorizeException.class, RESTAuthorizationException.class, AccessDeniedException.class})
protected void handleAuthorizeException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void handleAuthorizeException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
if (restAuthenticationService.hasAuthenticationData(request)) { if (restAuthenticationService.hasAuthenticationData(request)) {
@@ -73,7 +75,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
HttpServletResponse.SC_INTERNAL_SERVER_ERROR); HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
} }
@ExceptionHandler({MissingParameterException.class, QueryMethodParameterConversionException.class}) @ExceptionHandler( {MissingParameterException.class, QueryMethodParameterConversionException.class})
protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex) protected void ParameterConversionException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException { throws IOException {
@@ -87,7 +89,8 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
@Override @Override
protected ResponseEntity<Object> handleMissingServletRequestParameter(MissingServletRequestParameterException ex, protected ResponseEntity<Object> handleMissingServletRequestParameter(MissingServletRequestParameterException ex,
HttpHeaders headers, HttpStatus status, WebRequest request) { HttpHeaders headers, HttpStatus status,
WebRequest request) {
// we want the 422 status for missing parameter as it seems to be the common behavior for REST application, see // we want the 422 status for missing parameter as it seems to be the common behavior for REST application, see
// https://stackoverflow.com/questions/3050518/what-http-status-response-code-should-i-use-if-the-request-is-missing-a-required // https://stackoverflow.com/questions/3050518/what-http-status-response-code-should-i-use-if-the-request-is-missing-a-required
return super.handleMissingServletRequestParameter(ex, headers, HttpStatus.UNPROCESSABLE_ENTITY, request); return super.handleMissingServletRequestParameter(ex, headers, HttpStatus.UNPROCESSABLE_ENTITY, request);
@@ -102,6 +105,22 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH
return super.handleTypeMismatch(ex, headers, HttpStatus.UNPROCESSABLE_ENTITY, request); return super.handleTypeMismatch(ex, headers, HttpStatus.UNPROCESSABLE_ENTITY, request);
} }
@ExceptionHandler(Exception.class)
protected void handleGenericException(HttpServletRequest request, HttpServletResponse response, Exception ex)
throws IOException {
ResponseStatus responseStatusAnnotation = AnnotationUtils.findAnnotation(ex.getClass(), ResponseStatus.class);
int returnCode = 0;
if (responseStatusAnnotation != null) {
returnCode = responseStatusAnnotation.code().value();
} else {
returnCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
}
sendErrorResponse(request, response, ex, "An Exception has occured", returnCode);
}
private void sendErrorResponse(final HttpServletRequest request, final HttpServletResponse response, private void sendErrorResponse(final HttpServletRequest request, final HttpServletResponse response,
final Exception ex, final String message, final int statusCode) throws IOException { final Exception ex, final String message, final int statusCode) throws IOException {
//Make sure Spring picks up this exception //Make sure Spring picks up this exception

View File

@@ -0,0 +1,93 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.model;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore;
/**
* Abstract class to share common aspects between the REST representation of inprogressSubmission
*
* @author Andrea Bollini (andrea.bollini at 4science.it)
*
* @param <T>
* the serializable class used as primary key
*/
public abstract class AInprogressSubmissionRest<T extends Serializable> extends BaseObjectRest<T> {
private Date lastModified = new Date();
private Map<String, Serializable> sections;
@JsonIgnore
private CollectionRest collection;
@JsonIgnore
private ItemRest item;
@JsonIgnore
private SubmissionDefinitionRest submissionDefinition;
@JsonIgnore
private EPersonRest submitter;
public AInprogressSubmissionRest() {
super();
}
public Date getLastModified() {
return lastModified;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
public ItemRest getItem() {
return item;
}
public void setItem(ItemRest item) {
this.item = item;
}
public SubmissionDefinitionRest getSubmissionDefinition() {
return submissionDefinition;
}
public void setSubmissionDefinition(SubmissionDefinitionRest submissionDefinition) {
this.submissionDefinition = submissionDefinition;
}
public EPersonRest getSubmitter() {
return submitter;
}
public void setSubmitter(EPersonRest submitter) {
this.submitter = submitter;
}
public Map<String, Serializable> getSections() {
if (sections == null) {
sections = new HashMap<String, Serializable>();
}
return sections;
}
public void setSections(Map<String, Serializable> sections) {
this.sections = sections;
}
public CollectionRest getCollection() {
return collection;
}
public void setCollection(CollectionRest collection) {
this.collection = collection;
}
}

View File

@@ -24,6 +24,9 @@ public class ClaimedTaskRest extends BaseObjectRest<Integer> {
private String action; private String action;
@JsonIgnore
private EPersonRest owner;
@JsonIgnore @JsonIgnore
private WorkflowItemRest workflowitem; private WorkflowItemRest workflowitem;
@@ -66,6 +69,18 @@ public class ClaimedTaskRest extends BaseObjectRest<Integer> {
this.action = action; this.action = action;
} }
/**
* @see ClaimedTaskRest#getOwner()
* @return the owner of the task
*/
public EPersonRest getOwner() {
return owner;
}
public void setOwner(EPersonRest owner) {
this.owner = owner;
}
/** /**
* *
* @return the WorkflowItemRest that belong to this claimed task * @return the WorkflowItemRest that belong to this claimed task

View File

@@ -61,4 +61,9 @@ public class MetadataRest {
map.put(key, Arrays.asList(values)); map.put(key, Arrays.asList(values));
return this; return this;
} }
@Override
public boolean equals(Object object) {
return object instanceof MetadataRest && ((MetadataRest) object).getMap().equals(map);
}
} }

View File

@@ -7,12 +7,6 @@
*/ */
package org.dspace.app.rest.model; package org.dspace.app.rest.model;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.RestResourceController;
/** /**
@@ -20,26 +14,10 @@ import org.dspace.app.rest.RestResourceController;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
public class WorkflowItemRest extends BaseObjectRest<Integer> { public class WorkflowItemRest extends AInprogressSubmissionRest<Integer> {
public static final String NAME = "workflowitem"; public static final String NAME = "workflowitem";
public static final String CATEGORY = RestAddressableModel.WORKFLOW; public static final String CATEGORY = RestAddressableModel.WORKFLOW;
private Date lastModified = new Date();
private Map<String, Serializable> sections;
@JsonIgnore
private CollectionRest collection;
@JsonIgnore
private ItemRest item;
@JsonIgnore
private SubmissionDefinitionRest submissionDefinition;
@JsonIgnore
private EPersonRest submitter;
@Override @Override
public String getCategory() { public String getCategory() {
return CATEGORY; return CATEGORY;
@@ -50,83 +28,8 @@ public class WorkflowItemRest extends BaseObjectRest<Integer> {
return NAME; return NAME;
} }
/**
*
* @return the timestamp of the last modification done to the workflowitem
*/
public Date getLastModified() {
return lastModified;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
/**
*
* @return the item wrapped by the workflowitem
*/
public ItemRest getItem() {
return item;
}
public void setItem(ItemRest item) {
this.item = item;
}
/**
*
* @return the SubmissionDefinition used by the workflowitem
*/
public SubmissionDefinitionRest getSubmissionDefinition() {
return submissionDefinition;
}
public void setSubmissionDefinition(SubmissionDefinitionRest submissionDefinition) {
this.submissionDefinition = submissionDefinition;
}
/**
*
* @return the submitter
*/
public EPersonRest getSubmitter() {
return submitter;
}
public void setSubmitter(EPersonRest submitter) {
this.submitter = submitter;
}
@Override @Override
public Class getController() { public Class getController() {
return RestResourceController.class; return RestResourceController.class;
} }
/**
*
* @return the data of the workflowitem organized according to the submission definition
*/
public Map<String, Serializable> getSections() {
if (sections == null) {
sections = new HashMap<String, Serializable>();
}
return sections;
}
public void setSections(Map<String, Serializable> sections) {
this.sections = sections;
}
/**
*
* @return the collection where the workflow is in progress
*/
public CollectionRest getCollection() {
return collection;
}
public void setCollection(CollectionRest collection) {
this.collection = collection;
}
} }

View File

@@ -7,12 +7,6 @@
*/ */
package org.dspace.app.rest.model; package org.dspace.app.rest.model;
import java.io.Serializable;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.RestResourceController;
/** /**
@@ -20,26 +14,10 @@ import org.dspace.app.rest.RestResourceController;
* *
* @author Andrea Bollini (andrea.bollini at 4science.it) * @author Andrea Bollini (andrea.bollini at 4science.it)
*/ */
public class WorkspaceItemRest extends BaseObjectRest<Integer> { public class WorkspaceItemRest extends AInprogressSubmissionRest<Integer> {
public static final String NAME = "workspaceitem"; public static final String NAME = "workspaceitem";
public static final String CATEGORY = RestAddressableModel.SUBMISSION; public static final String CATEGORY = RestAddressableModel.SUBMISSION;
private Date lastModified = new Date();
private Map<String, Serializable> sections;
@JsonIgnore
private CollectionRest collection;
@JsonIgnore
private ItemRest item;
@JsonIgnore
private SubmissionDefinitionRest submissionDefinition;
@JsonIgnore
private EPersonRest submitter;
@Override @Override
public String getCategory() { public String getCategory() {
return CATEGORY; return CATEGORY;
@@ -50,59 +28,8 @@ public class WorkspaceItemRest extends BaseObjectRest<Integer> {
return NAME; return NAME;
} }
public Date getLastModified() {
return lastModified;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
public ItemRest getItem() {
return item;
}
public void setItem(ItemRest item) {
this.item = item;
}
public SubmissionDefinitionRest getSubmissionDefinition() {
return submissionDefinition;
}
public void setSubmissionDefinition(SubmissionDefinitionRest submissionDefinition) {
this.submissionDefinition = submissionDefinition;
}
public EPersonRest getSubmitter() {
return submitter;
}
public void setSubmitter(EPersonRest submitter) {
this.submitter = submitter;
}
@Override @Override
public Class getController() { public Class getController() {
return RestResourceController.class; return RestResourceController.class;
} }
public Map<String, Serializable> getSections() {
if (sections == null) {
sections = new HashMap<String, Serializable>();
}
return sections;
}
public void setSections(Map<String, Serializable> sections) {
this.sections = sections;
}
public CollectionRest getCollection() {
return collection;
}
public void setCollection(CollectionRest collection) {
this.collection = collection;
}
} }

View File

@@ -44,4 +44,7 @@ public class JsonValueEvaluator implements LateObjectEvaluator {
} }
} }
public JsonNode getValueNode() {
return this.valueNode;
}
} }

View File

@@ -14,11 +14,14 @@ import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.converter.BitstreamConverter; import org.dspace.app.rest.converter.BitstreamConverter;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.BitstreamRest; import org.dspace.app.rest.model.BitstreamRest;
import org.dspace.app.rest.model.hateoas.BitstreamResource; import org.dspace.app.rest.model.hateoas.BitstreamResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
@@ -39,16 +42,15 @@ import org.springframework.stereotype.Component;
*/ */
@Component(BitstreamRest.CATEGORY + "." + BitstreamRest.NAME) @Component(BitstreamRest.CATEGORY + "." + BitstreamRest.NAME)
public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest, UUID> { public class BitstreamRestRepository extends DSpaceObjectRestRepository<Bitstream, BitstreamRest> {
private final BitstreamService bs;
@Autowired @Autowired
BitstreamService bs; public BitstreamRestRepository(BitstreamService dsoService,
BitstreamConverter dsoConverter) {
@Autowired super(dsoService, dsoConverter, new DSpaceObjectPatch<BitstreamRest>() { });
BitstreamConverter converter; this.bs = dsoService;
public BitstreamRestRepository() {
System.out.println("Repository initialized by Spring");
} }
@Override @Override
@@ -70,7 +72,7 @@ public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest,
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
return converter.fromModel(bit); return dsoConverter.fromModel(bit);
} }
@Override @Override
@@ -88,10 +90,17 @@ public class BitstreamRestRepository extends DSpaceRestRepository<BitstreamRest,
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<BitstreamRest> page = new PageImpl<Bitstream>(bit, pageable, total).map(converter); Page<BitstreamRest> page = new PageImpl<Bitstream>(bit, pageable, total).map(dsoConverter);
return page; return page;
} }
@Override
@PreAuthorize("hasPermission(#id, 'BITSTREAM', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override @Override
public Class<BitstreamRest> getDomainClass() { public Class<BitstreamRest> getDomainClass() {
return BitstreamRest.class; return BitstreamRest.class;

View File

@@ -27,12 +27,11 @@ import org.dspace.app.rest.model.ClaimedTaskRest;
import org.dspace.app.rest.model.PoolTaskRest; import org.dspace.app.rest.model.PoolTaskRest;
import org.dspace.app.rest.model.hateoas.ClaimedTaskResource; import org.dspace.app.rest.model.hateoas.ClaimedTaskResource;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.dspace.event.Event;
import org.dspace.workflow.WorkflowException; import org.dspace.workflow.WorkflowException;
import org.dspace.xmlworkflow.WorkflowConfigurationException; import org.dspace.xmlworkflow.WorkflowConfigurationException;
import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory; import org.dspace.xmlworkflow.factory.XmlWorkflowServiceFactory;
@@ -48,6 +47,8 @@ import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -79,7 +80,11 @@ public class ClaimedTaskRestRepository extends DSpaceRestRepository<ClaimedTaskR
@Autowired @Autowired
WorkflowRequirementsService workflowRequirementsService; WorkflowRequirementsService workflowRequirementsService;
@Autowired
AuthorizeService authorizeService;
@Override @Override
@PreAuthorize("hasPermission(#id, 'CLAIMEDTASK', 'READ')")
public ClaimedTaskRest findOne(Context context, Integer id) { public ClaimedTaskRest findOne(Context context, Integer id) {
ClaimedTask task = null; ClaimedTask task = null;
try { try {
@@ -94,12 +99,24 @@ public class ClaimedTaskRestRepository extends DSpaceRestRepository<ClaimedTaskR
} }
@SearchRestMethod(name = "findByUser") @SearchRestMethod(name = "findByUser")
public Page<ClaimedTaskRest> findByUser(@Parameter(value = "uuid") UUID userID, Pageable pageable) { public Page<ClaimedTaskRest> findByUser(@Parameter(value = "uuid", required = true) UUID userID,
Pageable pageable) {
//FIXME this should be secured with annotation but they are currently ignored by search methods
List<ClaimedTask> tasks = null; List<ClaimedTask> tasks = null;
try { try {
Context context = obtainContext(); Context context = obtainContext();
EPerson currentUser = context.getCurrentUser();
if (currentUser == null) {
throw new RESTAuthorizationException(
"This endpoint is available only to logged-in user to search for their"
+ " own claimed tasks or the admins");
}
if (authorizeService.isAdmin(context) || userID.equals(currentUser.getID())) {
EPerson ep = epersonService.find(context, userID); EPerson ep = epersonService.find(context, userID);
tasks = claimedTaskService.findByEperson(context, ep); tasks = claimedTaskService.findByEperson(context, ep);
} else {
throw new RESTAuthorizationException("Only administrators can search for claimed tasks of other users");
}
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
@@ -118,10 +135,14 @@ public class ClaimedTaskRestRepository extends DSpaceRestRepository<ClaimedTaskR
} }
@Override @Override
@PreAuthorize("hasPermission(#id, 'CLAIMEDTASK', 'WRITE')")
protected ClaimedTaskRest action(Context context, HttpServletRequest request, Integer id) protected ClaimedTaskRest action(Context context, HttpServletRequest request, Integer id)
throws SQLException, IOException { throws SQLException, IOException {
ClaimedTask task = null; ClaimedTask task = null;
task = claimedTaskService.find(context, id); task = claimedTaskService.find(context, id);
if (task == null) {
throw new ResourceNotFoundException("ClaimedTask ID " + id + " not found");
}
XmlWorkflowServiceFactory factory = (XmlWorkflowServiceFactory) XmlWorkflowServiceFactory.getInstance(); XmlWorkflowServiceFactory factory = (XmlWorkflowServiceFactory) XmlWorkflowServiceFactory.getInstance();
Workflow workflow; Workflow workflow;
try { try {
@@ -136,10 +157,6 @@ public class ClaimedTaskRestRepository extends DSpaceRestRepository<ClaimedTaskR
throw new UnprocessableEntityException( throw new UnprocessableEntityException(
"Missing required fields: " + StringUtils.join(Action.getErrorFields(request), ",")); "Missing required fields: " + StringUtils.join(Action.getErrorFields(request), ","));
} }
// workflowRequirementsService.removeClaimedUser(context, task.getWorkflowItem(), task.getOwner(), task
// .getStepID());
context.addEvent(new Event(Event.MODIFY, Constants.ITEM, task.getWorkflowItem().getItem().getID(), null,
itemService.getIdentifiers(context, task.getWorkflowItem().getItem())));
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
throw new RESTAuthorizationException(e); throw new RESTAuthorizationException(e);
} catch (WorkflowException e) { } catch (WorkflowException e) {
@@ -157,15 +174,17 @@ public class ClaimedTaskRestRepository extends DSpaceRestRepository<ClaimedTaskR
* enough other claimed tasks for the same workflowitem. * enough other claimed tasks for the same workflowitem.
* *
*/ */
@PreAuthorize("hasPermission(#id, 'CLAIMEDTASK', 'DELETE')")
protected void delete(Context context, Integer id) { protected void delete(Context context, Integer id) {
ClaimedTask task = null; ClaimedTask task = null;
try { try {
task = claimedTaskService.find(context, id); task = claimedTaskService.find(context, id);
if (task == null) {
throw new ResourceNotFoundException("ClaimedTask ID " + id + " not found");
}
XmlWorkflowItem workflowItem = task.getWorkflowItem(); XmlWorkflowItem workflowItem = task.getWorkflowItem();
workflowService.deleteClaimedTask(context, workflowItem, task); workflowService.deleteClaimedTask(context, workflowItem, task);
workflowRequirementsService.removeClaimedUser(context, workflowItem, task.getOwner(), task.getStepID()); workflowRequirementsService.removeClaimedUser(context, workflowItem, task.getOwner(), task.getStepID());
context.addEvent(new Event(Event.MODIFY, Constants.ITEM, workflowItem.getItem().getID(), null,
itemService.getIdentifiers(context, workflowItem.getItem())));
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
throw new RESTAuthorizationException(e); throw new RESTAuthorizationException(e);
} catch (SQLException | IOException | WorkflowConfigurationException e) { } catch (SQLException | IOException | WorkflowConfigurationException e) {

View File

@@ -12,7 +12,6 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.ServletInputStream; import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.BadRequestException; import javax.ws.rs.BadRequestException;
@@ -29,6 +28,8 @@ import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.CollectionRest; import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.CommunityRest; import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.hateoas.CollectionResource; import org.dspace.app.rest.model.hateoas.CollectionResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.app.rest.utils.CollectionRestEqualityUtils; import org.dspace.app.rest.utils.CollectionRestEqualityUtils;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection; import org.dspace.content.Collection;
@@ -53,14 +54,13 @@ import org.springframework.stereotype.Component;
*/ */
@Component(CollectionRest.CATEGORY + "." + CollectionRest.NAME) @Component(CollectionRest.CATEGORY + "." + CollectionRest.NAME)
public class CollectionRestRepository extends DSpaceRestRepository<CollectionRest, UUID> { public class CollectionRestRepository extends DSpaceObjectRestRepository<Collection, CollectionRest> {
private final CollectionService cs;
@Autowired @Autowired
CommunityService communityService; CommunityService communityService;
@Autowired
CollectionService cs;
@Autowired @Autowired
CollectionConverter converter; CollectionConverter converter;
@@ -71,8 +71,10 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
CollectionRestEqualityUtils collectionRestEqualityUtils; CollectionRestEqualityUtils collectionRestEqualityUtils;
public CollectionRestRepository() { public CollectionRestRepository(CollectionService dsoService,
System.out.println("Repository initialized by Spring"); CollectionConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CollectionRest>() {});
this.cs = dsoService;
} }
@Override @Override
@@ -87,7 +89,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
if (collection == null) { if (collection == null) {
return null; return null;
} }
return converter.fromModel(collection); return dsoConverter.fromModel(collection);
} }
@Override @Override
@@ -104,7 +106,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CollectionRest> page = new PageImpl<Collection>(collections, pageable, total).map(converter); Page<CollectionRest> page = new PageImpl<Collection>(collections, pageable, total).map(dsoConverter);
return page; return page;
} }
@@ -128,7 +130,7 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CollectionRest> page = utils.getPage(collections, pageable).map(converter); Page<CollectionRest> page = utils.getPage(collections, pageable).map(dsoConverter);
return page; return page;
} }
@@ -145,10 +147,17 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CollectionRest> page = utils.getPage(collections, pageable).map(converter); Page<CollectionRest> page = utils.getPage(collections, pageable).map(dsoConverter);
return page; return page;
} }
@Override
@PreAuthorize("hasPermission(#id, 'COLLECTION', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override @Override
public Class<CollectionRest> getDomainClass() { public Class<CollectionRest> getDomainClass() {
return CollectionRest.class; return CollectionRest.class;
@@ -252,5 +261,4 @@ public class CollectionRestRepository extends DSpaceRestRepository<CollectionRes
throw new RuntimeException("Unable to delete collection because the logo couldn't be deleted", e); throw new RuntimeException("Unable to delete collection because the logo couldn't be deleted", e);
} }
} }
} }

View File

@@ -12,7 +12,6 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.ServletInputStream; import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.BadRequestException; import javax.ws.rs.BadRequestException;
@@ -28,6 +27,8 @@ import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.CommunityRest; import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.hateoas.CommunityResource; import org.dspace.app.rest.model.hateoas.CommunityResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.app.rest.utils.CommunityRestEqualityUtils; import org.dspace.app.rest.utils.CommunityRestEqualityUtils;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Community; import org.dspace.content.Community;
@@ -49,10 +50,9 @@ import org.springframework.stereotype.Component;
*/ */
@Component(CommunityRest.CATEGORY + "." + CommunityRest.NAME) @Component(CommunityRest.CATEGORY + "." + CommunityRest.NAME)
public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest, UUID> { public class CommunityRestRepository extends DSpaceObjectRestRepository<Community, CommunityRest> {
@Autowired private final CommunityService cs;
CommunityService cs;
@Autowired @Autowired
CommunityConverter converter; CommunityConverter converter;
@@ -63,8 +63,10 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
@Autowired @Autowired
CommunityRestEqualityUtils communityRestEqualityUtils; CommunityRestEqualityUtils communityRestEqualityUtils;
public CommunityRestRepository() { public CommunityRestRepository(CommunityService dsoService,
System.out.println("Repository initialized by Spring"); CommunityConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<CommunityRest>() {});
this.cs = dsoService;
} }
@Override @Override
@@ -107,7 +109,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
return converter.convert(community); return dsoConverter.convert(community);
} }
@Override @Override
@@ -122,7 +124,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
if (community == null) { if (community == null) {
return null; return null;
} }
return converter.fromModel(community); return dsoConverter.fromModel(community);
} }
@Override @Override
@@ -139,7 +141,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CommunityRest> page = new PageImpl<Community>(communities, pageable, total).map(converter); Page<CommunityRest> page = new PageImpl<Community>(communities, pageable, total).map(dsoConverter);
return page; return page;
} }
@@ -153,7 +155,7 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CommunityRest> page = utils.getPage(topCommunities, pageable).map(converter); Page<CommunityRest> page = utils.getPage(topCommunities, pageable).map(dsoConverter);
return page; return page;
} }
@@ -174,10 +176,17 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<CommunityRest> page = utils.getPage(subCommunities, pageable).map(converter); Page<CommunityRest> page = utils.getPage(subCommunities, pageable).map(dsoConverter);
return page; return page;
} }
@Override
@PreAuthorize("hasPermission(#id, 'COMMUNITY', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override @Override
public Class<CommunityRest> getDomainClass() { public Class<CommunityRest> getDomainClass() {
return CommunityRest.class; return CommunityRest.class;
@@ -233,5 +242,4 @@ public class CommunityRestRepository extends DSpaceRestRepository<CommunityRest,
throw new RuntimeException("Unable to delete community because the logo couldn't be deleted", e); throw new RuntimeException("Unable to delete community because the logo couldn't be deleted", e);
} }
} }
} }

View File

@@ -0,0 +1,86 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository;
import java.sql.SQLException;
import java.util.UUID;
import org.dspace.app.rest.converter.DSpaceObjectConverter;
import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.DSpaceObjectRest;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.DSpaceObjectService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
/**
* Base class for DSpaceObject-based Rest Repositories, providing common functionality.
*
* @param <M> the specific type of DSpaceObject.
* @param <R> the corresponding DSpaceObjectRest.
*/
public abstract class DSpaceObjectRestRepository<M extends DSpaceObject, R extends DSpaceObjectRest>
extends DSpaceRestRepository<R, UUID> {
final DSpaceObjectService<M> dsoService;
final DSpaceObjectPatch<R> dsoPatch;
final DSpaceObjectConverter<M, R> dsoConverter;
@Autowired
MetadataConverter metadataConverter;
DSpaceObjectRestRepository(DSpaceObjectService<M> dsoService,
DSpaceObjectConverter<M, R> dsoConverter,
DSpaceObjectPatch<R> dsoPatch) {
this.dsoService = dsoService;
this.dsoPatch = dsoPatch;
this.dsoConverter = dsoConverter;
}
/**
* Updates the DSpaceObject according to the given Patch.
*
* @param apiCategory the api category.
* @param model the api model.
* @param id the id of the DSpaceObject.
* @param patch the patch to apply.
* @throws AuthorizeException if the action is unauthorized.
* @throws ResourceNotFoundException if the DSpace object was not found.
* @throws SQLException if a database error occurs.
* @throws UnprocessableEntityException if the patch attempts to modify an unmodifiable attribute of the object.
*/
protected void patchDSpaceObject(String apiCategory, String model, UUID id, Patch patch)
throws AuthorizeException, ResourceNotFoundException, SQLException, UnprocessableEntityException {
M dso = dsoService.find(obtainContext(), id);
if (dso == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + id + " not found");
}
R dsoRest = dsoPatch.patch(findOne(id), patch.getOperations());
updateDSpaceObject(dso, dsoRest);
}
/**
* Applies the changes in the given rest DSpace object to the model DSpace object.
* The default implementation updates metadata if needed. Subclasses should extend
* to support updates of additional properties.
*
* @param dso the dso to apply changes to.
* @param dsoRest the rest representation of the new desired state.
*/
protected void updateDSpaceObject(M dso, R dsoRest)
throws AuthorizeException, SQLException {
R origDsoRest = dsoConverter.fromModel(dso);
if (!origDsoRest.getMetadata().equals(dsoRest.getMetadata())) {
metadataConverter.setMetadata(obtainContext(), dso, dsoRest.getMetadata());
}
}
}

View File

@@ -12,7 +12,6 @@ import java.sql.SQLException;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.UUID; import java.util.UUID;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
@@ -21,25 +20,21 @@ import org.dspace.app.rest.Parameter;
import org.dspace.app.rest.SearchRestMethod; import org.dspace.app.rest.SearchRestMethod;
import org.dspace.app.rest.converter.EPersonConverter; import org.dspace.app.rest.converter.EPersonConverter;
import org.dspace.app.rest.converter.MetadataConverter; import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.exception.RESTAuthorizationException; import org.dspace.app.rest.exception.RESTAuthorizationException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.EPersonRest; import org.dspace.app.rest.model.EPersonRest;
import org.dspace.app.rest.model.hateoas.EPersonResource; import org.dspace.app.rest.model.hateoas.EPersonResource;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.EPersonPatch; import org.dspace.app.rest.repository.patch.EPersonPatch;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService; import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.data.rest.webmvc.ResourceNotFoundException;
import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
@@ -51,14 +46,12 @@ import org.springframework.stereotype.Component;
*/ */
@Component(EPersonRest.CATEGORY + "." + EPersonRest.NAME) @Component(EPersonRest.CATEGORY + "." + EPersonRest.NAME)
public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUID> { public class EPersonRestRepository extends DSpaceObjectRestRepository<EPerson, EPersonRest> {
EPersonService es = EPersonServiceFactory.getInstance().getEPersonService();
@Autowired @Autowired
AuthorizeService authorizeService; AuthorizeService authorizeService;
@Autowired private final EPersonService es;
EPersonConverter converter;
@Autowired @Autowired
MetadataConverter metadataConverter; MetadataConverter metadataConverter;
@@ -66,6 +59,13 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
@Autowired @Autowired
EPersonPatch epersonPatch; EPersonPatch epersonPatch;
public EPersonRestRepository(EPersonService dsoService,
EPersonConverter dsoConverter,
EPersonPatch dsoPatch) {
super(dsoService, dsoConverter, dsoPatch);
this.es = dsoService;
}
@Override @Override
protected EPersonRest createAndReturn(Context context) protected EPersonRest createAndReturn(Context context)
throws AuthorizeException { throws AuthorizeException {
@@ -97,7 +97,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
return converter.convert(eperson); return dsoConverter.convert(eperson);
} }
@Override @Override
@@ -112,7 +112,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
if (eperson == null) { if (eperson == null) {
return null; return null;
} }
return converter.fromModel(eperson); return dsoConverter.fromModel(eperson);
} }
@Override @Override
@@ -130,7 +130,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(converter); Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(dsoConverter);
return page; return page;
} }
@@ -156,7 +156,7 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(converter); Page<EPersonRest> page = new PageImpl<EPerson>(epersons, pageable, total).map(dsoConverter);
return page; return page;
} }
@@ -182,42 +182,22 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
if (eperson == null) { if (eperson == null) {
return null; return null;
} }
return converter.fromModel(eperson); return dsoConverter.fromModel(eperson);
} }
@Override @Override
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasAuthority('ADMIN')")
public void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid, protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid,
Patch patch) Patch patch) throws AuthorizeException, SQLException {
throws UnprocessableEntityException, PatchBadRequestException, AuthorizeException, patchDSpaceObject(apiCategory, model, uuid, patch);
ResourceNotFoundException {
try {
EPerson eperson = es.find(context, uuid);
if (eperson == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + uuid + " not found");
}
List<Operation> operations = patch.getOperations();
EPersonRest ePersonRest = findOne(context, uuid);
EPersonRest patchedModel = (EPersonRest) epersonPatch.patch(ePersonRest, operations);
updatePatchedValues(context, patchedModel, eperson);
} catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e);
}
} }
/** @Override
* Applies changes in the rest model. protected void updateDSpaceObject(EPerson ePerson, EPersonRest ePersonRest)
* @param context throws AuthorizeException, SQLException {
* @param ePersonRest the updated eperson rest super.updateDSpaceObject(ePerson, ePersonRest);
* @param ePerson the eperson content object
* @throws SQLException
* @throws AuthorizeException
*/
private void updatePatchedValues(Context context, EPersonRest ePersonRest, EPerson ePerson)
throws SQLException, AuthorizeException {
Context context = obtainContext();
if (ePersonRest.getPassword() != null) { if (ePersonRest.getPassword() != null) {
es.setPassword(ePerson, ePersonRest.getPassword()); es.setPassword(ePerson, ePersonRest.getPassword());
} }
@@ -232,7 +212,6 @@ public class EPersonRestRepository extends DSpaceRestRepository<EPersonRest, UUI
} }
es.update(context, ePerson); es.update(context, ePerson);
} }
@Override @Override

View File

@@ -21,6 +21,8 @@ import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.GroupRest; import org.dspace.app.rest.model.GroupRest;
import org.dspace.app.rest.model.hateoas.GroupResource; import org.dspace.app.rest.model.hateoas.GroupResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.Group; import org.dspace.eperson.Group;
@@ -39,12 +41,16 @@ import org.springframework.stereotype.Component;
*/ */
@Component(GroupRest.CATEGORY + "." + GroupRest.NAME) @Component(GroupRest.CATEGORY + "." + GroupRest.NAME)
public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> { public class GroupRestRepository extends DSpaceObjectRestRepository<Group, GroupRest> {
@Autowired @Autowired
GroupService gs; GroupService gs;
@Autowired @Autowired
GroupConverter converter; GroupRestRepository(GroupService dsoService,
GroupConverter dsoConverter) {
super(dsoService, dsoConverter, new DSpaceObjectPatch<GroupRest>() {});
this.gs = dsoService;
}
@Autowired @Autowired
MetadataConverter metadataConverter; MetadataConverter metadataConverter;
@@ -73,7 +79,7 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
throw new RuntimeException(excSQL.getMessage(), excSQL); throw new RuntimeException(excSQL.getMessage(), excSQL);
} }
return converter.convert(group); return dsoConverter.convert(group);
} }
@Override @Override
@@ -88,7 +94,7 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
if (group == null) { if (group == null) {
return null; return null;
} }
return converter.fromModel(group); return dsoConverter.fromModel(group);
} }
@PreAuthorize("hasAuthority('ADMIN')") @PreAuthorize("hasAuthority('ADMIN')")
@@ -102,10 +108,17 @@ public class GroupRestRepository extends DSpaceRestRepository<GroupRest, UUID> {
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<GroupRest> page = new PageImpl<Group>(groups, pageable, total).map(converter); Page<GroupRest> page = new PageImpl<Group>(groups, pageable, total).map(dsoConverter);
return page; return page;
} }
@Override
@PreAuthorize("hasPermission(#id, 'GROUP', 'WRITE')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override @Override
public Class<GroupRest> getDomainClass() { public Class<GroupRest> getDomainClass() {
return GroupRest.class; return GroupRest.class;

View File

@@ -23,12 +23,10 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.app.rest.converter.ItemConverter; import org.dspace.app.rest.converter.ItemConverter;
import org.dspace.app.rest.converter.MetadataConverter; import org.dspace.app.rest.converter.MetadataConverter;
import org.dspace.app.rest.exception.PatchBadRequestException;
import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException;
import org.dspace.app.rest.exception.UnprocessableEntityException; import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.ItemRest; import org.dspace.app.rest.model.ItemRest;
import org.dspace.app.rest.model.hateoas.ItemResource; import org.dspace.app.rest.model.hateoas.ItemResource;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch; import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.ItemPatch; import org.dspace.app.rest.repository.patch.ItemPatch;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
@@ -56,15 +54,11 @@ import org.springframework.stereotype.Component;
*/ */
@Component(ItemRest.CATEGORY + "." + ItemRest.NAME) @Component(ItemRest.CATEGORY + "." + ItemRest.NAME)
public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> { public class ItemRestRepository extends DSpaceObjectRestRepository<Item, ItemRest> {
private static final Logger log = Logger.getLogger(ItemRestRepository.class); private static final Logger log = Logger.getLogger(ItemRestRepository.class);
@Autowired private final ItemService is;
ItemService is;
@Autowired
ItemConverter converter;
@Autowired @Autowired
MetadataConverter metadataConverter; MetadataConverter metadataConverter;
@@ -84,8 +78,11 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
@Autowired @Autowired
InstallItemService installItemService; InstallItemService installItemService;
public ItemRestRepository() { public ItemRestRepository(ItemService dsoService,
System.out.println("Repository initialized by Spring"); ItemConverter dsoConverter,
ItemPatch dsoPatch) {
super(dsoService, dsoConverter, dsoPatch);
this.is = dsoService;
} }
@Override @Override
@@ -100,7 +97,7 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
if (item == null) { if (item == null) {
return null; return null;
} }
return converter.fromModel(item); return dsoConverter.fromModel(item);
} }
@Override @Override
@@ -119,41 +116,23 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<ItemRest> page = new PageImpl<Item>(items, pageable, total).map(converter); Page<ItemRest> page = new PageImpl<Item>(items, pageable, total).map(dsoConverter);
return page; return page;
} }
@Override @Override
public void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID uuid, @PreAuthorize("hasPermission(#id, 'ITEM', 'WRITE')")
Patch patch) protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
throws UnprocessableEntityException, PatchBadRequestException, SQLException, AuthorizeException, Patch patch) throws AuthorizeException, SQLException {
ResourceNotFoundException { patchDSpaceObject(apiCategory, model, id, patch);
Item item = is.find(context, uuid);
if (item == null) {
throw new ResourceNotFoundException(apiCategory + "." + model + " with id: " + uuid + " not found");
} }
List<Operation> operations = patch.getOperations(); @Override
ItemRest itemRest = findOne(uuid); protected void updateDSpaceObject(Item item, ItemRest itemRest)
throws AuthorizeException, SQLException {
super.updateDSpaceObject(item, itemRest);
ItemRest patchedModel = (ItemRest) itemPatch.patch(itemRest, operations); Context context = obtainContext();
updatePatchedValues(context, patchedModel, item);
}
/**
* Persists changes to the rest model.
* @param context
* @param itemRest the updated item rest resource
* @param item the item content object
* @throws SQLException
* @throws AuthorizeException
*/
private void updatePatchedValues(Context context, ItemRest itemRest, Item item)
throws SQLException, AuthorizeException {
try {
if (itemRest.getWithdrawn() != item.isWithdrawn()) { if (itemRest.getWithdrawn() != item.isWithdrawn()) {
if (itemRest.getWithdrawn()) { if (itemRest.getWithdrawn()) {
is.withdraw(context, item); is.withdraw(context, item);
@@ -165,10 +144,6 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
item.setDiscoverable(itemRest.getDiscoverable()); item.setDiscoverable(itemRest.getDiscoverable());
is.update(context, item); is.update(context, item);
} }
} catch (SQLException | AuthorizeException e) {
e.printStackTrace();
throw e;
}
} }
@Override @Override
@@ -242,7 +217,7 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
Item itemToReturn = installItemService.installItem(context, workspaceItem); Item itemToReturn = installItemService.installItem(context, workspaceItem);
return converter.fromModel(itemToReturn); return dsoConverter.fromModel(itemToReturn);
} }
@Override @Override
@@ -271,6 +246,6 @@ public class ItemRestRepository extends DSpaceRestRepository<ItemRest, UUID> {
+ uuid + ", " + uuid + ", "
+ itemRest.getId()); + itemRest.getId());
} }
return converter.fromModel(item); return dsoConverter.fromModel(item);
} }
} }

View File

@@ -24,6 +24,7 @@ import org.dspace.app.rest.exception.UnprocessableEntityException;
import org.dspace.app.rest.model.PoolTaskRest; import org.dspace.app.rest.model.PoolTaskRest;
import org.dspace.app.rest.model.hateoas.PoolTaskResource; import org.dspace.app.rest.model.hateoas.PoolTaskResource;
import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.service.ItemService; import org.dspace.content.service.ItemService;
import org.dspace.core.Context; import org.dspace.core.Context;
import org.dspace.eperson.EPerson; import org.dspace.eperson.EPerson;
@@ -41,6 +42,7 @@ import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -72,7 +74,11 @@ public class PoolTaskRestRepository extends DSpaceRestRepository<PoolTaskRest, I
@Autowired @Autowired
WorkflowRequirementsService workflowRequirementsService; WorkflowRequirementsService workflowRequirementsService;
@Autowired
AuthorizeService authorizeService;
@Override @Override
@PreAuthorize("hasPermission(#id, 'POOLTASK', 'READ')")
public PoolTaskRest findOne(Context context, Integer id) { public PoolTaskRest findOne(Context context, Integer id) {
PoolTask task = null; PoolTask task = null;
try { try {
@@ -91,8 +97,19 @@ public class PoolTaskRestRepository extends DSpaceRestRepository<PoolTaskRest, I
List<PoolTask> tasks = null; List<PoolTask> tasks = null;
try { try {
Context context = obtainContext(); Context context = obtainContext();
//FIXME this should be secured with annotation but they are currently ignored by search methods
EPerson currentUser = context.getCurrentUser();
if (currentUser == null) {
throw new RESTAuthorizationException(
"This endpoint is available only to logged-in user"
+ " to search for their own pool tasks or the admins");
}
if (authorizeService.isAdmin(context) || userID.equals(currentUser.getID())) {
EPerson ep = epersonService.find(context, userID); EPerson ep = epersonService.find(context, userID);
tasks = poolTaskService.findByEperson(context, ep); tasks = poolTaskService.findByEperson(context, ep);
} else {
throw new RESTAuthorizationException("Only administrators can search for pool tasks of other users");
}
} catch (AuthorizeException e) { } catch (AuthorizeException e) {
throw new RESTAuthorizationException(e); throw new RESTAuthorizationException(e);
} catch (SQLException | IOException e) { } catch (SQLException | IOException e) {
@@ -113,6 +130,7 @@ public class PoolTaskRestRepository extends DSpaceRestRepository<PoolTaskRest, I
} }
@Override @Override
@PreAuthorize("hasPermission(#id, 'POOLTASK', 'WRITE')")
protected PoolTaskRest action(Context context, HttpServletRequest request, Integer id) protected PoolTaskRest action(Context context, HttpServletRequest request, Integer id)
throws SQLException, IOException { throws SQLException, IOException {
PoolTask task = null; PoolTask task = null;

View File

@@ -11,10 +11,14 @@ import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import org.dspace.app.rest.converter.SiteConverter; import org.dspace.app.rest.converter.SiteConverter;
import org.dspace.app.rest.model.SiteRest; import org.dspace.app.rest.model.SiteRest;
import org.dspace.app.rest.model.hateoas.SiteResource; import org.dspace.app.rest.model.hateoas.SiteResource;
import org.dspace.app.rest.model.patch.Patch;
import org.dspace.app.rest.repository.patch.DSpaceObjectPatch;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Site; import org.dspace.content.Site;
import org.dspace.content.service.SiteService; import org.dspace.content.service.SiteService;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -22,6 +26,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/** /**
@@ -31,16 +36,15 @@ import org.springframework.stereotype.Component;
*/ */
@Component(SiteRest.CATEGORY + "." + SiteRest.NAME) @Component(SiteRest.CATEGORY + "." + SiteRest.NAME)
public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> { public class SiteRestRepository extends DSpaceObjectRestRepository<Site, SiteRest> {
private final SiteService sitesv;
@Autowired @Autowired
SiteService sitesv; public SiteRestRepository(SiteService dsoService,
SiteConverter dsoConverter) {
@Autowired super(dsoService, dsoConverter, new DSpaceObjectPatch<SiteRest>() {});
SiteConverter converter; this.sitesv = dsoService;
public SiteRestRepository() {
} }
@Override @Override
@@ -54,7 +58,7 @@ public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> {
if (site == null) { if (site == null) {
return null; return null;
} }
return converter.fromModel(site); return dsoConverter.fromModel(site);
} }
@Override @Override
@@ -66,10 +70,17 @@ public class SiteRestRepository extends DSpaceRestRepository<SiteRest, UUID> {
} catch (SQLException e) { } catch (SQLException e) {
throw new RuntimeException(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e);
} }
Page<SiteRest> page = new PageImpl<Site>(sites, pageable, total).map(converter); Page<SiteRest> page = new PageImpl<Site>(sites, pageable, total).map(dsoConverter);
return page; return page;
} }
@Override
@PreAuthorize("hasAuthority('ADMIN')")
protected void patch(Context context, HttpServletRequest request, String apiCategory, String model, UUID id,
Patch patch) throws AuthorizeException, SQLException {
patchDSpaceObject(apiCategory, model, id, patch);
}
@Override @Override
public Class<SiteRest> getDomainClass() { public Class<SiteRest> getDomainClass() {
return SiteRest.class; return SiteRest.class;

View File

@@ -30,7 +30,7 @@ public abstract class AbstractResourcePatch<R extends RestModel> {
* @throws UnprocessableEntityException * @throws UnprocessableEntityException
* @throws PatchBadRequestException * @throws PatchBadRequestException
*/ */
public RestModel patch(R restModel, List<Operation> operations) { public R patch(R restModel, List<Operation> operations) {
// Note: the list of possible operations is taken from JsonPatchConverter class. Does not implement // Note: the list of possible operations is taken from JsonPatchConverter class. Does not implement
// test https://tools.ietf.org/html/rfc6902#section-4.6 // test https://tools.ietf.org/html/rfc6902#section-4.6

View File

@@ -0,0 +1,81 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.repository.patch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.flipkart.zjsonpatch.JsonPatch;
import org.dspace.app.rest.converter.JsonPatchConverter;
import org.dspace.app.rest.model.DSpaceObjectRest;
import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.patch.Operation;
import org.dspace.app.rest.model.patch.Patch;
/**
* Base class for DSpaceObject-based PATCH operations, providing common functionality.
*
* @param <R> the type of DSpaceObjectRest object the class is applicable to.
*/
public abstract class DSpaceObjectPatch<R extends DSpaceObjectRest> extends AbstractResourcePatch<R> {

    private static final String METADATA_PATH = "/metadata";

    private ObjectMapper objectMapper = new ObjectMapper();

    private JsonPatchConverter jsonPatchConverter = new JsonPatchConverter(objectMapper);

    /**
     * Applies the given patch operations to the given DSpaceObjectRest instance.
     *
     * Operations targeting {@code /metadata} (or any path beneath it) are collected and
     * applied first as a single JSON patch against the object's metadata; all remaining
     * operations are then delegated to the default implementation.
     *
     * @param dsoRest the instance to apply the changes to.
     * @param operations the list of patch operations.
     * @return the modified DSpaceObjectRest instance.
     */
    @Override
    public R patch(R dsoRest, List<Operation> operations) {
        List<Operation> metadataOps = new ArrayList<>();
        List<Operation> remainingOps = new ArrayList<>();
        for (Operation op : operations) {
            if (isMetadataOperation(op)) {
                metadataOps.add(op);
            } else {
                remainingOps.add(op);
            }
        }
        if (!metadataOps.isEmpty()) {
            JsonNode metadataPatch = jsonPatchConverter.convert(new Patch(metadataOps));
            dsoRest.setMetadata(applyMetadataPatch(metadataPatch, dsoRest.getMetadata()));
        }
        return super.patch(dsoRest, remainingOps);
    }

    /**
     * Tells whether the given operation targets the metadata section of the object,
     * i.e. its path is exactly {@code /metadata} or starts with {@code /metadata/}.
     */
    private boolean isMetadataOperation(Operation operation) {
        String path = operation.getPath();
        return path.equals(METADATA_PATH) || path.startsWith(METADATA_PATH + "/");
    }

    /**
     * Applies a JSON patch to the given metadata by round-tripping it through a Jackson
     * tree: the metadata is wrapped under a "metadata" property so the operation paths
     * (which all begin with /metadata) resolve correctly, patched in place, and converted
     * back into a MetadataRest instance.
     *
     * @param patch the JSON patch to apply (paths rooted at /metadata).
     * @param metadataRest the metadata to patch.
     * @return the patched metadata.
     * @throws IllegalArgumentException if the patched tree cannot be read back as metadata.
     */
    private MetadataRest applyMetadataPatch(JsonNode patch, MetadataRest metadataRest) {
        try {
            ObjectNode wrapper = objectMapper.createObjectNode();
            wrapper.replace("metadata", objectMapper.valueToTree(metadataRest));
            JsonPatch.applyInPlace(patch, wrapper);
            return objectMapper.treeToValue(wrapper.get("metadata"), MetadataRest.class);
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    }
}

View File

@@ -20,7 +20,7 @@ import org.springframework.stereotype.Component;
* Provides patch operations for eperson updates. * Provides patch operations for eperson updates.
*/ */
@Component @Component
public class EPersonPatch extends AbstractResourcePatch<EPersonRest> { public class EPersonPatch extends DSpaceObjectPatch<EPersonRest> {
@Autowired @Autowired
EPersonOperationFactory patchFactory; EPersonOperationFactory patchFactory;

View File

@@ -20,7 +20,7 @@ import org.springframework.stereotype.Component;
* Provides PATCH operations for item updates. * Provides PATCH operations for item updates.
*/ */
@Component @Component
public class ItemPatch extends AbstractResourcePatch<ItemRest> { public class ItemPatch extends DSpaceObjectPatch<ItemRest> {
@Autowired @Autowired
ItemOperationFactory patchFactory; ItemOperationFactory patchFactory;

View File

@@ -0,0 +1,80 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.security;
import java.io.Serializable;
import java.sql.SQLException;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.RequestService;
import org.dspace.services.model.Request;
import org.dspace.xmlworkflow.storedcomponents.ClaimedTask;
import org.dspace.xmlworkflow.storedcomponents.service.ClaimedTaskService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.stereotype.Component;
/**
 * An authenticated user is allowed to interact with a claimed task only if they own
 * the claim.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
@Component
public class ClaimedTaskRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin {

    private static final Logger log = LoggerFactory.getLogger(ClaimedTaskRestPermissionEvaluatorPlugin.class);

    @Autowired
    private RequestService requestService;

    @Autowired
    private ClaimedTaskService claimedTaskService;

    @Autowired
    private EPersonService ePersonService;

    /**
     * Grants permission on a claimed task only to the EPerson that owns the claim.
     *
     * @param authentication the current authentication; its principal is expected to be
     *                       the user's email address.
     * @param targetId the identifier of the claimed task (numeric).
     * @param targetType the REST object type; anything other than CLAIMEDTASK is rejected.
     * @param permission the requested permission (not used in the decision).
     * @return true if the task is owned by the authenticated user, or if the task does
     *         not exist (so a more specific status code can be returned downstream);
     *         false otherwise.
     */
    @Override
    public boolean hasPermission(Authentication authentication, Serializable targetId,
                                 String targetType, Object permission) {
        if (Constants.getTypeID(targetType) != Constants.CLAIMEDTASK) {
            return false;
        }

        Request request = requestService.getCurrentRequest();
        Context context = ContextUtil.obtainContext(request.getServletRequest());

        try {
            EPerson ePerson = ePersonService.findByEmail(context, (String) authentication.getPrincipal());
            if (ePerson == null) {
                return false;
            }

            Integer dsoId = Integer.parseInt(targetId.toString());
            ClaimedTask claimedTask = claimedTaskService.find(context, dsoId);

            // If the claimed task is null then we give permission so we can throw another status code instead
            if (claimedTask == null) {
                return true;
            }

            // task's owner can interact with it
            if (ePerson.equals(claimedTask.getOwner())) {
                return true;
            }
        } catch (SQLException e) {
            log.error(e.getMessage(), e);
        } catch (NumberFormatException e) {
            // a non-numeric id cannot match any claimed task; deny rather than propagate
            log.warn("Invalid claimed task id: {}", targetId);
        }
        return false;
    }
}

View File

@@ -0,0 +1,85 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.rest.security;
import java.io.IOException;
import java.io.Serializable;
import java.sql.SQLException;
import org.dspace.app.rest.utils.ContextUtil;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.services.RequestService;
import org.dspace.services.model.Request;
import org.dspace.xmlworkflow.storedcomponents.PoolTask;
import org.dspace.xmlworkflow.storedcomponents.XmlWorkflowItem;
import org.dspace.xmlworkflow.storedcomponents.service.PoolTaskService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.stereotype.Component;
/**
 * An authenticated user is allowed to interact with a pool task only if it is in their list.
 *
 * @author Andrea Bollini (andrea.bollini at 4science.it)
 */
@Component
public class PoolTaskRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin {

    private static final Logger log = LoggerFactory.getLogger(PoolTaskRestPermissionEvaluatorPlugin.class);

    @Autowired
    private RequestService requestService;

    @Autowired
    private PoolTaskService poolTaskService;

    @Autowired
    private EPersonService ePersonService;

    /**
     * Grants permission on a pool task only to an EPerson whose own task pool contains
     * a task for the same workflow item.
     *
     * @param authentication the current authentication; its principal is expected to be
     *                       the user's email address.
     * @param targetId the identifier of the pool task (numeric).
     * @param targetType the REST object type; anything other than POOLTASK is rejected.
     * @param permission the requested permission (not used in the decision).
     * @return true if the pool task belongs to the authenticated user's pool, or if the
     *         task does not exist (so a more specific status code can be returned
     *         downstream); false otherwise.
     */
    @Override
    public boolean hasPermission(Authentication authentication, Serializable targetId,
                                 String targetType, Object permission) {
        if (Constants.getTypeID(targetType) != Constants.POOLTASK) {
            return false;
        }

        Request request = requestService.getCurrentRequest();
        Context context = ContextUtil.obtainContext(request.getServletRequest());

        try {
            EPerson ePerson = ePersonService.findByEmail(context, (String) authentication.getPrincipal());
            if (ePerson == null) {
                return false;
            }

            Integer dsoId = Integer.parseInt(targetId.toString());
            PoolTask poolTask = poolTaskService.find(context, dsoId);

            // If the pool task is null then we give permission so we can throw another status code instead
            if (poolTask == null) {
                return true;
            }

            // the task is in the user's pool if a pool task for the same workflow item
            // exists for this EPerson and it is this very task
            XmlWorkflowItem workflowItem = poolTask.getWorkflowItem();
            PoolTask usersPoolTask = poolTaskService.findByWorkflowIdAndEPerson(context, workflowItem, ePerson);
            if (usersPoolTask != null && usersPoolTask.getID() == poolTask.getID()) {
                return true;
            }
        } catch (SQLException | AuthorizeException | IOException e) {
            log.error(e.getMessage(), e);
        } catch (NumberFormatException e) {
            // a non-numeric id cannot match any pool task; deny rather than propagate
            log.warn("Invalid pool task id: {}", targetId);
        }
        return false;
    }
}

View File

@@ -213,19 +213,28 @@ public class SubmissionService {
char[] arr = new char[1024]; char[] arr = new char[1024];
StringBuilder buffer = new StringBuilder(); StringBuilder buffer = new StringBuilder();
int numCharsRead = reader.read(arr, 0, arr.length); int numCharsRead = reader.read(arr, 0, arr.length);
if (numCharsRead > 0) {
buffer.append(arr, 0, numCharsRead); buffer.append(arr, 0, numCharsRead);
}
if (numCharsRead == arr.length) { if (numCharsRead == arr.length) {
throw new RuntimeException("Malformed body... too long"); throw new UnprocessableEntityException("Malformed body... too long");
} }
String regex = "\\/api\\/" + WorkspaceItemRest.CATEGORY + "\\/" + English.plural(WorkspaceItemRest.NAME) String regex = "\\/api\\/" + WorkspaceItemRest.CATEGORY + "\\/" + English.plural(WorkspaceItemRest.NAME)
+ "\\/"; + "\\/";
String[] split = buffer.toString().split(regex, 2); String[] split = buffer.toString().split(regex, 2);
if (split.length != 2) { if (split.length != 2) {
throw new RuntimeException("Malformed body..." + buffer); throw new UnprocessableEntityException("Malformed body..." + buffer);
} }
// END FIXME // END FIXME
WorkspaceItem wsi = workspaceItemService.find(context, Integer.parseInt(split[1])); WorkspaceItem wsi = null;
try {
wsi = workspaceItemService.find(context, Integer.parseInt(split[1]));
} catch (NumberFormatException e) {
throw new UnprocessableEntityException("The provided workspaceitem URI is not valid");
}
if (wsi == null) {
throw new UnprocessableEntityException("Workspace item is not found");
}
if (!workspaceItemConverter.convert(wsi).getErrors().isEmpty()) { if (!workspaceItemConverter.convert(wsi).getErrors().isEmpty()) {
throw new UnprocessableEntityException( throw new UnprocessableEntityException(
"Start workflow failed due to validation error on workspaceitem"); "Start workflow failed due to validation error on workspaceitem");

View File

@@ -86,26 +86,23 @@ server.context-parameters.dspace.dir=${dspace.dir}
# Error handling settings # Error handling settings
# Always include the fullstacktrace in error pages # Always include the fullstacktrace in error pages
# (Our Error page hides this stacktrace so it only is visible in HTML source)
# Can be set to "never" if you don't want it. # Can be set to "never" if you don't want it.
server.error.include-stacktrace = always server.error.include-stacktrace = always
###################### ######################
# Spring Boot Autoconfigure # Spring Boot Autoconfigure
# #
# DISABLE a few autoconfiguration scripts, as DSpace configures these already # DISABLE a few autoconfiguration scripts, as DSpace initializes/configures these already
# * DataSourceAutoConfiguration (DB connection / datasource) # * DataSourceAutoConfiguration (DB connection / datasource)
# * FlywayAutoConfiguration (Flyway migrations) # * FlywayAutoConfiguration (Flyway migrations)
# * HibernateJpaAutoConfiguration (Hibernate)
# * SolrAutoConfiguration (Solr)
# #
# TODO: If we go with Spring boot, we should investigate whether it's worth # TODO: At some point we may want to investigate whether we can re-enable these and remove the custom DSpace init code
# re-enabling these and removing the custom DSpace initialization code
#spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \
# org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \
# org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration
spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \ spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration, \
org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \ org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration, \
org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration org.springframework.boot.autoconfigure.flyway.FlywayAutoConfiguration, \
org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration
######################### #########################
# Spring Boot Logging levels # Spring Boot Logging levels

View File

@@ -27,11 +27,13 @@ import org.dspace.app.rest.builder.ItemBuilder;
import org.dspace.app.rest.matcher.BitstreamFormatMatcher; import org.dspace.app.rest.matcher.BitstreamFormatMatcher;
import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BitstreamMatcher;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.content.Bitstream; import org.dspace.content.Bitstream;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.content.Item; import org.dspace.content.Item;
import org.dspace.content.service.BitstreamService; import org.dspace.content.service.BitstreamService;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
@@ -587,4 +589,25 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest
getClient(token).perform(delete("/api/core/bitstreams/" + col.getLogo().getID())) getClient(token).perform(delete("/api/core/bitstreams/" + col.getLogo().getID()))
.andExpect(status().is(422)); .andExpect(status().is(422));
} }
@Test
public void patchBitstreamMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchBitstreamMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community")
.build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/bitstreams/"
+ parentCommunity.getLogo().getID(), expectedStatus);
}
} }

View File

@@ -31,10 +31,12 @@ import org.dspace.app.rest.model.CollectionRest;
import org.dspace.app.rest.model.MetadataRest; import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest; import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Test; import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -624,6 +626,24 @@ public class CollectionRestRepositoryIT extends AbstractControllerIntegrationTes
; ;
authorizeService.removePoliciesActionFilter(context, eperson, Constants.WRITE); authorizeService.removePoliciesActionFilter(context, eperson, Constants.WRITE);
}
public void patchCollectionMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchCollectionMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build();
Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/collections/" + col.getID(), expectedStatus);
} }
} }

View File

@@ -33,10 +33,12 @@ import org.dspace.app.rest.model.CommunityRest;
import org.dspace.app.rest.model.MetadataRest; import org.dspace.app.rest.model.MetadataRest;
import org.dspace.app.rest.model.MetadataValueRest; import org.dspace.app.rest.model.MetadataValueRest;
import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.AbstractControllerIntegrationTest;
import org.dspace.app.rest.test.MetadataPatchSuite;
import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Collection; import org.dspace.content.Collection;
import org.dspace.content.Community; import org.dspace.content.Community;
import org.dspace.core.Constants; import org.dspace.core.Constants;
import org.dspace.eperson.EPerson;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import org.junit.Test; import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
@@ -852,4 +854,22 @@ public class CommunityRestRepositoryIT extends AbstractControllerIntegrationTest
} }
public void patchCommunityMetadataAuthorized() throws Exception {
runPatchMetadataTests(admin, 200);
}
@Test
public void patchCommunityMetadataUnauthorized() throws Exception {
runPatchMetadataTests(eperson, 403);
}
private void runPatchMetadataTests(EPerson asUser, int expectedStatus) throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").build();
context.restoreAuthSystemState();
String token = getAuthToken(asUser.getEmail(), password);
new MetadataPatchSuite().runWith(getClient(token), "/api/core/communities/"
+ parentCommunity.getID(), expectedStatus);
}
} }

Some files were not shown because too many files have changed in this diff Show More